ngram
listlengths
0
82k
[ "mock_method) ) @pytest.fixture def monkeypatch_importerror(monkeypatch): @contextmanager def cm(mocked_imports): orig_import =", "MonkeyPatch() if request.param == \"pyrepl\": try: import pyrepl.readline # noqa:", "(coverage) # with `sys.settrace(None)`. def settrace(func): if func is None:", "\"\"\" assert sys.gettrace() is _orig_trace orig_settrace = sys.settrace # Wrap", "request.param == \"pyrepl\": try: import pyrepl.readline # noqa: F401 except", "any real ~/.pdbrc.py then, and seems to be required also", "import_mock(name, *args): if name in mocked_imports: raise ImportError return orig_import(name,", "# Wrap sys.settrace to restore original tracing function (coverage) #", "tracking. \"\"\" assert sys.gettrace() is _orig_trace orig_settrace = sys.settrace #", "_pytest.monkeypatch import MonkeyPatch m = MonkeyPatch() if request.param == \"pyrepl\":", "= sys.gettrace() @pytest.fixture(scope=\"session\", autouse=True) def term(): \"\"\"Configure TERM for predictable", "tmpdir = request.getfixturevalue(\"testdir\").tmpdir else: tmpdir = request.getfixturevalue(\"_tmphome_path\") monkeypatch.setenv(\"HOME\", str(tmpdir)) monkeypatch.setenv(\"USERPROFILE\",", "*args): if name in mocked_imports: raise ImportError return orig_import(name, *args)", "orig_settrace = sys.settrace # Wrap sys.settrace to restore original tracing", "_orig_trace = None def pytest_configure(): global _orig_trace _orig_trace = sys.gettrace()", "to re-enable coverage tracking. 
\"\"\" assert sys.gettrace() is _orig_trace orig_settrace", "\"pyrepl\": readline = \"pyrepl.readline\" else: assert readline_param == \"readline\" readline", "%s)\" % (method, args, kwargs)) for mock_method in (\"set_trace\", \"set_continue\"):", "mock_method, functools.partial(mock, mock_method) ) @pytest.fixture def monkeypatch_importerror(monkeypatch): @contextmanager def cm(mocked_imports):", "is _orig_trace orig_settrace = sys.settrace # Wrap sys.settrace to restore", "request.param @pytest.fixture def monkeypatch_readline(request, monkeypatch, readline_param): \"\"\"Patch readline to return", "mock_method in (\"set_trace\", \"set_continue\"): monkeypatch.setattr( \"pdb.pdb.Pdb.%s\" % mock_method, functools.partial(mock, mock_method)", "restore_settrace(monkeypatch): \"\"\"(Re)store sys.gettrace after test run. This is required to", "on py27, where it would read contents from ~/.pdbrc?!. \"\"\"", "temporary directory. This ignores any real ~/.pdbrc.py then, and seems", "ImportError as exc: pytest.skip(msg=\"pyrepl not available: {}\".format(exc)) m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", True) else:", "readline, lambda: line) monkeypatch.setattr(\"%s.get_begidx\" % readline, lambda: begidx) monkeypatch.setattr(\"%s.get_endidx\" %", "= MonkeyPatch() m.setenv(\"TERM\", \"xterm-256color\") yield m m.undo() # if _orig_trace", "monkeypatch.setattr(\"%s.get_begidx\" % readline, lambda: begidx) monkeypatch.setattr(\"%s.get_endidx\" % readline, lambda: endidx)", "pytest _orig_trace = None def pytest_configure(): global _orig_trace _orig_trace =", "tmpdir @pytest.fixture(params=(\"pyrepl\", \"readline\"), scope=\"session\") def readline_param(request): from _pytest.monkeypatch import MonkeyPatch", "\"pyrepl.readline\" else: assert readline_param == \"readline\" readline = \"readline\" monkeypatch.setattr(\"%s.get_line_buffer\"", "= sys.gettrace() if newtrace is not _orig_trace: sys.settrace(_orig_trace) assert newtrace", "begidx) 
monkeypatch.setattr(\"%s.get_endidx\" % readline, lambda: endidx) return inner @pytest.fixture def", "MonkeyPatch() m.setenv(\"TERM\", \"xterm-256color\") yield m m.undo() # if _orig_trace and", "def pytest_configure(): global _orig_trace _orig_trace = sys.gettrace() @pytest.fixture(scope=\"session\", autouse=True) def", "a temporary directory. This ignores any real ~/.pdbrc.py then, and", "m = MonkeyPatch() m.setenv(\"TERM\", \"xterm-256color\") yield m m.undo() # if", "% mock_method, functools.partial(mock, mock_method) ) @pytest.fixture def monkeypatch_importerror(monkeypatch): @contextmanager def", "= sys.settrace # Wrap sys.settrace to restore original tracing function", "available: {}\".format(exc)) m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", True) else: m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", False) return request.param @pytest.fixture", "contents from ~/.pdbrc?!. \"\"\" # Use tmpdir from testdir, if", "for mock_method in (\"set_trace\", \"set_continue\"): monkeypatch.setattr( \"pdb.pdb.Pdb.%s\" % mock_method, functools.partial(mock,", "with `sys.settrace(None)`. def settrace(func): if func is None: orig_settrace(_orig_trace) else:", "monkeypatch_pdb_methods(monkeypatch): def mock(method, *args, **kwargs): print(\"=== %s(%s, %s)\" % (method,", "HOME in a temporary directory. 
This ignores any real ~/.pdbrc.py", "pytest.skip(msg=\"pyrepl not available: {}\".format(exc)) m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", True) else: m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", False) return", "monkeypatch_importerror(monkeypatch): @contextmanager def cm(mocked_imports): orig_import = __import__ def import_mock(name, *args):", "return tmpdir_factory.mktemp(\"tmphome\") @pytest.fixture(autouse=sys.version_info < (3, 6)) def tmphome(request, monkeypatch): \"\"\"Set", "given results.\"\"\" def inner(line, begidx, endidx): if readline_param == \"pyrepl\":", "with monkeypatch.context() as m: if sys.version_info >= (3,): m.setattr('builtins.__import__', import_mock)", "endidx): if readline_param == \"pyrepl\": readline = \"pyrepl.readline\" else: assert", "% readline, lambda: endidx) return inner @pytest.fixture def monkeypatch_pdb_methods(monkeypatch): def", "orig_import(name, *args) with monkeypatch.context() as m: if sys.version_info >= (3,):", "TERM for predictable output from Pygments.\"\"\" from _pytest.monkeypatch import MonkeyPatch", "from ~/.pdbrc?!. \"\"\" # Use tmpdir from testdir, if it", "global _orig_trace _orig_trace = sys.gettrace() @pytest.fixture(scope=\"session\", autouse=True) def term(): \"\"\"Configure", "def readline_param(request): from _pytest.monkeypatch import MonkeyPatch m = MonkeyPatch() if", "test run. This is required to re-enable coverage tracking. \"\"\"", "tmpdir.as_cwd(): yield tmpdir @pytest.fixture(params=(\"pyrepl\", \"readline\"), scope=\"session\") def readline_param(request): from _pytest.monkeypatch", "autouse=True) def term(): \"\"\"Configure TERM for predictable output from Pygments.\"\"\"", "yield m m.undo() # if _orig_trace and not hasattr(sys, \"pypy_version_info\"):", "monkeypatch.setattr( \"pdb.pdb.Pdb.%s\" % mock_method, functools.partial(mock, mock_method) ) @pytest.fixture def monkeypatch_importerror(monkeypatch):", "directory. 
This ignores any real ~/.pdbrc.py then, and seems to", "\"\"\"(Re)store sys.gettrace after test run. This is required to re-enable", "if func is None: orig_settrace(_orig_trace) else: orig_settrace(func) monkeypatch.setattr(\"sys.settrace\", settrace) yield", "@pytest.fixture(autouse=True) def restore_settrace(monkeypatch): \"\"\"(Re)store sys.gettrace after test run. This is", "settrace(func): if func is None: orig_settrace(_orig_trace) else: orig_settrace(func) monkeypatch.setattr(\"sys.settrace\", settrace)", "\"\"\"Set up HOME in a temporary directory. This ignores any", "~/.pdbrc?!. \"\"\" # Use tmpdir from testdir, if it is", ") @pytest.fixture def monkeypatch_importerror(monkeypatch): @contextmanager def cm(mocked_imports): orig_import = __import__", "= __import__ def import_mock(name, *args): if name in mocked_imports: raise", "`sys.settrace(None)`. def settrace(func): if func is None: orig_settrace(_orig_trace) else: orig_settrace(func)", "@pytest.fixture(scope=\"session\") def _tmphome_path(tmpdir_factory): return tmpdir_factory.mktemp(\"tmphome\") @pytest.fixture(autouse=sys.version_info < (3, 6)) def", "m.setenv(\"TERM\", \"xterm-256color\") yield m m.undo() # if _orig_trace and not", "def import_mock(name, *args): if name in mocked_imports: raise ImportError return", "def term(): \"\"\"Configure TERM for predictable output from Pygments.\"\"\" from", "return request.param @pytest.fixture def monkeypatch_readline(request, monkeypatch, readline_param): \"\"\"Patch readline to", "tmpdir = request.getfixturevalue(\"_tmphome_path\") monkeypatch.setenv(\"HOME\", str(tmpdir)) monkeypatch.setenv(\"USERPROFILE\", str(tmpdir)) with tmpdir.as_cwd(): yield", "sys.gettrace() if newtrace is not _orig_trace: sys.settrace(_orig_trace) assert newtrace is", "and seems to be required also with linecache on py27,", "= \"pyrepl.readline\" else: assert readline_param == \"readline\" readline = \"readline\"", "newtrace = sys.gettrace() if newtrace is not _orig_trace: 
sys.settrace(_orig_trace) assert", "is not _orig_trace: sys.settrace(_orig_trace) assert newtrace is None @pytest.fixture(scope=\"session\") def", "original tracing function (coverage) # with `sys.settrace(None)`. def settrace(func): if", "coverage tracking. \"\"\" assert sys.gettrace() is _orig_trace orig_settrace = sys.settrace", "from _pytest.monkeypatch import MonkeyPatch m = MonkeyPatch() m.setenv(\"TERM\", \"xterm-256color\") yield", "in mocked_imports: raise ImportError return orig_import(name, *args) with monkeypatch.context() as", "to restore original tracing function (coverage) # with `sys.settrace(None)`. def", "__import__ def import_mock(name, *args): if name in mocked_imports: raise ImportError", "*args, **kwargs): print(\"=== %s(%s, %s)\" % (method, args, kwargs)) for", "@pytest.fixture def monkeypatch_readline(request, monkeypatch, readline_param): \"\"\"Patch readline to return given", "settrace) yield newtrace = sys.gettrace() if newtrace is not _orig_trace:", "@pytest.fixture def monkeypatch_importerror(monkeypatch): @contextmanager def cm(mocked_imports): orig_import = __import__ def", "pytest_configure(): global _orig_trace _orig_trace = sys.gettrace() @pytest.fixture(scope=\"session\", autouse=True) def term():", "tmphome(request, monkeypatch): \"\"\"Set up HOME in a temporary directory. This", "py27, where it would read contents from ~/.pdbrc?!. 
\"\"\" #", "m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", False) return request.param @pytest.fixture def monkeypatch_readline(request, monkeypatch, readline_param): \"\"\"Patch", "with tmpdir.as_cwd(): yield tmpdir @pytest.fixture(params=(\"pyrepl\", \"readline\"), scope=\"session\") def readline_param(request): from", "name in mocked_imports: raise ImportError return orig_import(name, *args) with monkeypatch.context()", "lambda: line) monkeypatch.setattr(\"%s.get_begidx\" % readline, lambda: begidx) monkeypatch.setattr(\"%s.get_endidx\" % readline,", "readline_param(request): from _pytest.monkeypatch import MonkeyPatch m = MonkeyPatch() if request.param", "Wrap sys.settrace to restore original tracing function (coverage) # with", "real ~/.pdbrc.py then, and seems to be required also with", "@pytest.fixture(scope=\"session\", autouse=True) def term(): \"\"\"Configure TERM for predictable output from", "str(tmpdir)) with tmpdir.as_cwd(): yield tmpdir @pytest.fixture(params=(\"pyrepl\", \"readline\"), scope=\"session\") def readline_param(request):", "monkeypatch.setattr(\"sys.settrace\", settrace) yield newtrace = sys.gettrace() if newtrace is not", "\"readline\" readline = \"readline\" monkeypatch.setattr(\"%s.get_line_buffer\" % readline, lambda: line) monkeypatch.setattr(\"%s.get_begidx\"", "pyrepl.readline # noqa: F401 except ImportError as exc: pytest.skip(msg=\"pyrepl not", "with PyPy2 (https://travis-ci.org/antocuni/pdb/jobs/509624590)?! @pytest.fixture(autouse=True) def restore_settrace(monkeypatch): \"\"\"(Re)store sys.gettrace after test", "< (3, 6)) def tmphome(request, monkeypatch): \"\"\"Set up HOME in", "import functools import sys from contextlib import contextmanager import pytest", "sys.version_info >= (3,): m.setattr('builtins.__import__', import_mock) else: m.setattr('__builtin__.__import__', import_mock) yield m", "and not hasattr(sys, \"pypy_version_info\"): # Fails with PyPy2 (https://travis-ci.org/antocuni/pdb/jobs/509624590)?! 
@pytest.fixture(autouse=True)", "orig_settrace(func) monkeypatch.setattr(\"sys.settrace\", settrace) yield newtrace = sys.gettrace() if newtrace is", "sys.settrace(_orig_trace) assert newtrace is None @pytest.fixture(scope=\"session\") def _tmphome_path(tmpdir_factory): return tmpdir_factory.mktemp(\"tmphome\")", "== \"pyrepl\": try: import pyrepl.readline # noqa: F401 except ImportError", "args, kwargs)) for mock_method in (\"set_trace\", \"set_continue\"): monkeypatch.setattr( \"pdb.pdb.Pdb.%s\" %", "else: tmpdir = request.getfixturevalue(\"_tmphome_path\") monkeypatch.setenv(\"HOME\", str(tmpdir)) monkeypatch.setenv(\"USERPROFILE\", str(tmpdir)) with tmpdir.as_cwd():", "request.fixturenames: tmpdir = request.getfixturevalue(\"testdir\").tmpdir else: tmpdir = request.getfixturevalue(\"_tmphome_path\") monkeypatch.setenv(\"HOME\", str(tmpdir))", "except ImportError as exc: pytest.skip(msg=\"pyrepl not available: {}\".format(exc)) m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", True)", "as exc: pytest.skip(msg=\"pyrepl not available: {}\".format(exc)) m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", True) else: m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\",", "m.undo() # if _orig_trace and not hasattr(sys, \"pypy_version_info\"): # Fails", "~/.pdbrc.py then, and seems to be required also with linecache", "for predictable output from Pygments.\"\"\" from _pytest.monkeypatch import MonkeyPatch m", "not hasattr(sys, \"pypy_version_info\"): # Fails with PyPy2 (https://travis-ci.org/antocuni/pdb/jobs/509624590)?! @pytest.fixture(autouse=True) def", "monkeypatch.setenv(\"USERPROFILE\", str(tmpdir)) with tmpdir.as_cwd(): yield tmpdir @pytest.fixture(params=(\"pyrepl\", \"readline\"), scope=\"session\") def", "(3, 6)) def tmphome(request, monkeypatch): \"\"\"Set up HOME in a", "# Fails with PyPy2 (https://travis-ci.org/antocuni/pdb/jobs/509624590)?! 
@pytest.fixture(autouse=True) def restore_settrace(monkeypatch): \"\"\"(Re)store sys.gettrace", "_orig_trace _orig_trace = sys.gettrace() @pytest.fixture(scope=\"session\", autouse=True) def term(): \"\"\"Configure TERM", "\"\"\" # Use tmpdir from testdir, if it is used.", "readline = \"readline\" monkeypatch.setattr(\"%s.get_line_buffer\" % readline, lambda: line) monkeypatch.setattr(\"%s.get_begidx\" %", "it would read contents from ~/.pdbrc?!. \"\"\" # Use tmpdir", "print(\"=== %s(%s, %s)\" % (method, args, kwargs)) for mock_method in", "tmpdir_factory.mktemp(\"tmphome\") @pytest.fixture(autouse=sys.version_info < (3, 6)) def tmphome(request, monkeypatch): \"\"\"Set up", "hasattr(sys, \"pypy_version_info\"): # Fails with PyPy2 (https://travis-ci.org/antocuni/pdb/jobs/509624590)?! @pytest.fixture(autouse=True) def restore_settrace(monkeypatch):", "assert sys.gettrace() is _orig_trace orig_settrace = sys.settrace # Wrap sys.settrace", "Fails with PyPy2 (https://travis-ci.org/antocuni/pdb/jobs/509624590)?! @pytest.fixture(autouse=True) def restore_settrace(monkeypatch): \"\"\"(Re)store sys.gettrace after", "where it would read contents from ~/.pdbrc?!. 
\"\"\" # Use", "readline_param == \"readline\" readline = \"readline\" monkeypatch.setattr(\"%s.get_line_buffer\" % readline, lambda:", "import contextmanager import pytest _orig_trace = None def pytest_configure(): global", "False) return request.param @pytest.fixture def monkeypatch_readline(request, monkeypatch, readline_param): \"\"\"Patch readline", "sys.settrace # Wrap sys.settrace to restore original tracing function (coverage)", "newtrace is not _orig_trace: sys.settrace(_orig_trace) assert newtrace is None @pytest.fixture(scope=\"session\")", "# if _orig_trace and not hasattr(sys, \"pypy_version_info\"): # Fails with", "F401 except ImportError as exc: pytest.skip(msg=\"pyrepl not available: {}\".format(exc)) m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\",", "sys from contextlib import contextmanager import pytest _orig_trace = None", "= request.getfixturevalue(\"testdir\").tmpdir else: tmpdir = request.getfixturevalue(\"_tmphome_path\") monkeypatch.setenv(\"HOME\", str(tmpdir)) monkeypatch.setenv(\"USERPROFILE\", str(tmpdir))", "return given results.\"\"\" def inner(line, begidx, endidx): if readline_param ==", "@contextmanager def cm(mocked_imports): orig_import = __import__ def import_mock(name, *args): if", "def cm(mocked_imports): orig_import = __import__ def import_mock(name, *args): if name", "else: assert readline_param == \"readline\" readline = \"readline\" monkeypatch.setattr(\"%s.get_line_buffer\" %", "monkeypatch.setattr(\"%s.get_line_buffer\" % readline, lambda: line) monkeypatch.setattr(\"%s.get_begidx\" % readline, lambda: begidx)", "assert newtrace is None @pytest.fixture(scope=\"session\") def _tmphome_path(tmpdir_factory): return tmpdir_factory.mktemp(\"tmphome\") @pytest.fixture(autouse=sys.version_info", "# Use tmpdir from testdir, if it is used. 
if", "if name in mocked_imports: raise ImportError return orig_import(name, *args) with", "_orig_trace orig_settrace = sys.settrace # Wrap sys.settrace to restore original", "as m: if sys.version_info >= (3,): m.setattr('builtins.__import__', import_mock) else: m.setattr('__builtin__.__import__',", "\"pypy_version_info\"): # Fails with PyPy2 (https://travis-ci.org/antocuni/pdb/jobs/509624590)?! @pytest.fixture(autouse=True) def restore_settrace(monkeypatch): \"\"\"(Re)store", "\"readline\" monkeypatch.setattr(\"%s.get_line_buffer\" % readline, lambda: line) monkeypatch.setattr(\"%s.get_begidx\" % readline, lambda:", "\"testdir\" in request.fixturenames: tmpdir = request.getfixturevalue(\"testdir\").tmpdir else: tmpdir = request.getfixturevalue(\"_tmphome_path\")", "request.getfixturevalue(\"_tmphome_path\") monkeypatch.setenv(\"HOME\", str(tmpdir)) monkeypatch.setenv(\"USERPROFILE\", str(tmpdir)) with tmpdir.as_cwd(): yield tmpdir @pytest.fixture(params=(\"pyrepl\",", "term(): \"\"\"Configure TERM for predictable output from Pygments.\"\"\" from _pytest.monkeypatch", "required also with linecache on py27, where it would read", "def monkeypatch_importerror(monkeypatch): @contextmanager def cm(mocked_imports): orig_import = __import__ def import_mock(name,", "sys.gettrace() @pytest.fixture(scope=\"session\", autouse=True) def term(): \"\"\"Configure TERM for predictable output", "*args) with monkeypatch.context() as m: if sys.version_info >= (3,): m.setattr('builtins.__import__',", "# with `sys.settrace(None)`. 
def settrace(func): if func is None: orig_settrace(_orig_trace)", "line) monkeypatch.setattr(\"%s.get_begidx\" % readline, lambda: begidx) monkeypatch.setattr(\"%s.get_endidx\" % readline, lambda:", "mock(method, *args, **kwargs): print(\"=== %s(%s, %s)\" % (method, args, kwargs))", "mocked_imports: raise ImportError return orig_import(name, *args) with monkeypatch.context() as m:", "== \"pyrepl\": readline = \"pyrepl.readline\" else: assert readline_param == \"readline\"", "MonkeyPatch m = MonkeyPatch() m.setenv(\"TERM\", \"xterm-256color\") yield m m.undo() #", "readline, lambda: endidx) return inner @pytest.fixture def monkeypatch_pdb_methods(monkeypatch): def mock(method,", "def _tmphome_path(tmpdir_factory): return tmpdir_factory.mktemp(\"tmphome\") @pytest.fixture(autouse=sys.version_info < (3, 6)) def tmphome(request,", "monkeypatch.context() as m: if sys.version_info >= (3,): m.setattr('builtins.__import__', import_mock) else:", "try: import pyrepl.readline # noqa: F401 except ImportError as exc:", "{}\".format(exc)) m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", True) else: m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", False) return request.param @pytest.fixture def", "up HOME in a temporary directory. This ignores any real", "This ignores any real ~/.pdbrc.py then, and seems to be", "sys.settrace to restore original tracing function (coverage) # with `sys.settrace(None)`.", "@pytest.fixture(params=(\"pyrepl\", \"readline\"), scope=\"session\") def readline_param(request): from _pytest.monkeypatch import MonkeyPatch m", "it is used. 
if \"testdir\" in request.fixturenames: tmpdir = request.getfixturevalue(\"testdir\").tmpdir", "readline = \"pyrepl.readline\" else: assert readline_param == \"readline\" readline =", "% readline, lambda: begidx) monkeypatch.setattr(\"%s.get_endidx\" % readline, lambda: endidx) return", "from contextlib import contextmanager import pytest _orig_trace = None def", "inner @pytest.fixture def monkeypatch_pdb_methods(monkeypatch): def mock(method, *args, **kwargs): print(\"=== %s(%s,", "output from Pygments.\"\"\" from _pytest.monkeypatch import MonkeyPatch m = MonkeyPatch()", "in (\"set_trace\", \"set_continue\"): monkeypatch.setattr( \"pdb.pdb.Pdb.%s\" % mock_method, functools.partial(mock, mock_method) )", "def mock(method, *args, **kwargs): print(\"=== %s(%s, %s)\" % (method, args,", "if readline_param == \"pyrepl\": readline = \"pyrepl.readline\" else: assert readline_param", "is None: orig_settrace(_orig_trace) else: orig_settrace(func) monkeypatch.setattr(\"sys.settrace\", settrace) yield newtrace =", "request.getfixturevalue(\"testdir\").tmpdir else: tmpdir = request.getfixturevalue(\"_tmphome_path\") monkeypatch.setenv(\"HOME\", str(tmpdir)) monkeypatch.setenv(\"USERPROFILE\", str(tmpdir)) with", "_orig_trace = sys.gettrace() @pytest.fixture(scope=\"session\", autouse=True) def term(): \"\"\"Configure TERM for", "\"\"\"Patch readline to return given results.\"\"\" def inner(line, begidx, endidx):", "orig_settrace(_orig_trace) else: orig_settrace(func) monkeypatch.setattr(\"sys.settrace\", settrace) yield newtrace = sys.gettrace() if", "monkeypatch): \"\"\"Set up HOME in a temporary directory. This ignores", "endidx) return inner @pytest.fixture def monkeypatch_pdb_methods(monkeypatch): def mock(method, *args, **kwargs):", "tracing function (coverage) # with `sys.settrace(None)`. 
def settrace(func): if func", "True) else: m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", False) return request.param @pytest.fixture def monkeypatch_readline(request, monkeypatch,", "@pytest.fixture(autouse=sys.version_info < (3, 6)) def tmphome(request, monkeypatch): \"\"\"Set up HOME", "= MonkeyPatch() if request.param == \"pyrepl\": try: import pyrepl.readline #", "from _pytest.monkeypatch import MonkeyPatch m = MonkeyPatch() if request.param ==", "None def pytest_configure(): global _orig_trace _orig_trace = sys.gettrace() @pytest.fixture(scope=\"session\", autouse=True)", "m m.undo() # if _orig_trace and not hasattr(sys, \"pypy_version_info\"): #", "= None def pytest_configure(): global _orig_trace _orig_trace = sys.gettrace() @pytest.fixture(scope=\"session\",", "if _orig_trace and not hasattr(sys, \"pypy_version_info\"): # Fails with PyPy2", "is required to re-enable coverage tracking. \"\"\" assert sys.gettrace() is", "%s(%s, %s)\" % (method, args, kwargs)) for mock_method in (\"set_trace\",", "if sys.version_info >= (3,): m.setattr('builtins.__import__', import_mock) else: m.setattr('__builtin__.__import__', import_mock) yield", "_orig_trace and not hasattr(sys, \"pypy_version_info\"): # Fails with PyPy2 (https://travis-ci.org/antocuni/pdb/jobs/509624590)?!", "= \"readline\" monkeypatch.setattr(\"%s.get_line_buffer\" % readline, lambda: line) monkeypatch.setattr(\"%s.get_begidx\" % readline,", "monkeypatch, readline_param): \"\"\"Patch readline to return given results.\"\"\" def inner(line,", "return orig_import(name, *args) with monkeypatch.context() as m: if sys.version_info >=", "if newtrace is not _orig_trace: sys.settrace(_orig_trace) assert newtrace is None", "lambda: begidx) monkeypatch.setattr(\"%s.get_endidx\" % readline, lambda: endidx) return inner @pytest.fixture", "predictable output from Pygments.\"\"\" from _pytest.monkeypatch import MonkeyPatch m =", "inner(line, begidx, endidx): if readline_param == \"pyrepl\": readline = 
\"pyrepl.readline\"", "m = MonkeyPatch() if request.param == \"pyrepl\": try: import pyrepl.readline", "func is None: orig_settrace(_orig_trace) else: orig_settrace(func) monkeypatch.setattr(\"sys.settrace\", settrace) yield newtrace", "(3,): m.setattr('builtins.__import__', import_mock) else: m.setattr('__builtin__.__import__', import_mock) yield m return cm", "This is required to re-enable coverage tracking. \"\"\" assert sys.gettrace()", "in request.fixturenames: tmpdir = request.getfixturevalue(\"testdir\").tmpdir else: tmpdir = request.getfixturevalue(\"_tmphome_path\") monkeypatch.setenv(\"HOME\",", "raise ImportError return orig_import(name, *args) with monkeypatch.context() as m: if", "from testdir, if it is used. if \"testdir\" in request.fixturenames:", "\"pyrepl\": try: import pyrepl.readline # noqa: F401 except ImportError as", "import MonkeyPatch m = MonkeyPatch() if request.param == \"pyrepl\": try:", "read contents from ~/.pdbrc?!. \"\"\" # Use tmpdir from testdir,", "ImportError return orig_import(name, *args) with monkeypatch.context() as m: if sys.version_info", "\"set_continue\"): monkeypatch.setattr( \"pdb.pdb.Pdb.%s\" % mock_method, functools.partial(mock, mock_method) ) @pytest.fixture def", "begidx, endidx): if readline_param == \"pyrepl\": readline = \"pyrepl.readline\" else:", "contextlib import contextmanager import pytest _orig_trace = None def pytest_configure():", "\"pdb.pdb.Pdb.%s\" % mock_method, functools.partial(mock, mock_method) ) @pytest.fixture def monkeypatch_importerror(monkeypatch): @contextmanager", "scope=\"session\") def readline_param(request): from _pytest.monkeypatch import MonkeyPatch m = MonkeyPatch()", "PyPy2 (https://travis-ci.org/antocuni/pdb/jobs/509624590)?! 
@pytest.fixture(autouse=True) def restore_settrace(monkeypatch): \"\"\"(Re)store sys.gettrace after test run.", "not _orig_trace: sys.settrace(_orig_trace) assert newtrace is None @pytest.fixture(scope=\"session\") def _tmphome_path(tmpdir_factory):", "after test run. This is required to re-enable coverage tracking.", "readline_param): \"\"\"Patch readline to return given results.\"\"\" def inner(line, begidx,", "assert readline_param == \"readline\" readline = \"readline\" monkeypatch.setattr(\"%s.get_line_buffer\" % readline,", "else: orig_settrace(func) monkeypatch.setattr(\"sys.settrace\", settrace) yield newtrace = sys.gettrace() if newtrace", "run. This is required to re-enable coverage tracking. \"\"\" assert", "readline to return given results.\"\"\" def inner(line, begidx, endidx): if", "would read contents from ~/.pdbrc?!. \"\"\" # Use tmpdir from", "monkeypatch.setattr(\"%s.get_endidx\" % readline, lambda: endidx) return inner @pytest.fixture def monkeypatch_pdb_methods(monkeypatch):", "sys.gettrace after test run. 
This is required to re-enable coverage", "def tmphome(request, monkeypatch): \"\"\"Set up HOME in a temporary directory.", "results.\"\"\" def inner(line, begidx, endidx): if readline_param == \"pyrepl\": readline", "# noqa: F401 except ImportError as exc: pytest.skip(msg=\"pyrepl not available:", "import MonkeyPatch m = MonkeyPatch() m.setenv(\"TERM\", \"xterm-256color\") yield m m.undo()", "def settrace(func): if func is None: orig_settrace(_orig_trace) else: orig_settrace(func) monkeypatch.setattr(\"sys.settrace\",", "is None @pytest.fixture(scope=\"session\") def _tmphome_path(tmpdir_factory): return tmpdir_factory.mktemp(\"tmphome\") @pytest.fixture(autouse=sys.version_info < (3,", "with linecache on py27, where it would read contents from", "kwargs)) for mock_method in (\"set_trace\", \"set_continue\"): monkeypatch.setattr( \"pdb.pdb.Pdb.%s\" % mock_method,", "Pygments.\"\"\" from _pytest.monkeypatch import MonkeyPatch m = MonkeyPatch() m.setenv(\"TERM\", \"xterm-256color\")", "\"readline\"), scope=\"session\") def readline_param(request): from _pytest.monkeypatch import MonkeyPatch m =", "\"xterm-256color\") yield m m.undo() # if _orig_trace and not hasattr(sys,", "to return given results.\"\"\" def inner(line, begidx, endidx): if readline_param", "function (coverage) # with `sys.settrace(None)`. def settrace(func): if func is", "is used. if \"testdir\" in request.fixturenames: tmpdir = request.getfixturevalue(\"testdir\").tmpdir else:", "tmpdir from testdir, if it is used. 
if \"testdir\" in", "def monkeypatch_pdb_methods(monkeypatch): def mock(method, *args, **kwargs): print(\"=== %s(%s, %s)\" %", "cm(mocked_imports): orig_import = __import__ def import_mock(name, *args): if name in", "_orig_trace: sys.settrace(_orig_trace) assert newtrace is None @pytest.fixture(scope=\"session\") def _tmphome_path(tmpdir_factory): return", "% (method, args, kwargs)) for mock_method in (\"set_trace\", \"set_continue\"): monkeypatch.setattr(", "be required also with linecache on py27, where it would", "not available: {}\".format(exc)) m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", True) else: m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", False) return request.param", "(method, args, kwargs)) for mock_method in (\"set_trace\", \"set_continue\"): monkeypatch.setattr( \"pdb.pdb.Pdb.%s\"", "functools.partial(mock, mock_method) ) @pytest.fixture def monkeypatch_importerror(monkeypatch): @contextmanager def cm(mocked_imports): orig_import", "from Pygments.\"\"\" from _pytest.monkeypatch import MonkeyPatch m = MonkeyPatch() m.setenv(\"TERM\",", "_tmphome_path(tmpdir_factory): return tmpdir_factory.mktemp(\"tmphome\") @pytest.fixture(autouse=sys.version_info < (3, 6)) def tmphome(request, monkeypatch):", "None @pytest.fixture(scope=\"session\") def _tmphome_path(tmpdir_factory): return tmpdir_factory.mktemp(\"tmphome\") @pytest.fixture(autouse=sys.version_info < (3, 6))", "monkeypatch.setenv(\"HOME\", str(tmpdir)) monkeypatch.setenv(\"USERPROFILE\", str(tmpdir)) with tmpdir.as_cwd(): yield tmpdir @pytest.fixture(params=(\"pyrepl\", \"readline\"),", "also with linecache on py27, where it would read contents", "if \"testdir\" in request.fixturenames: tmpdir = request.getfixturevalue(\"testdir\").tmpdir else: tmpdir =", "m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", True) else: m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", False) return request.param @pytest.fixture def monkeypatch_readline(request,", "None: 
orig_settrace(_orig_trace) else: orig_settrace(func) monkeypatch.setattr(\"sys.settrace\", settrace) yield newtrace = sys.gettrace()", "testdir, if it is used. if \"testdir\" in request.fixturenames: tmpdir", "orig_import = __import__ def import_mock(name, *args): if name in mocked_imports:", "noqa: F401 except ImportError as exc: pytest.skip(msg=\"pyrepl not available: {}\".format(exc))", "contextmanager import pytest _orig_trace = None def pytest_configure(): global _orig_trace", "ignores any real ~/.pdbrc.py then, and seems to be required", "== \"readline\" readline = \"readline\" monkeypatch.setattr(\"%s.get_line_buffer\" % readline, lambda: line)", "to be required also with linecache on py27, where it", "functools import sys from contextlib import contextmanager import pytest _orig_trace", "seems to be required also with linecache on py27, where", "yield newtrace = sys.gettrace() if newtrace is not _orig_trace: sys.settrace(_orig_trace)", "used. if \"testdir\" in request.fixturenames: tmpdir = request.getfixturevalue(\"testdir\").tmpdir else: tmpdir", "sys.gettrace() is _orig_trace orig_settrace = sys.settrace # Wrap sys.settrace to", "str(tmpdir)) monkeypatch.setenv(\"USERPROFILE\", str(tmpdir)) with tmpdir.as_cwd(): yield tmpdir @pytest.fixture(params=(\"pyrepl\", \"readline\"), scope=\"session\")", "def inner(line, begidx, endidx): if readline_param == \"pyrepl\": readline =", "if it is used. 
if \"testdir\" in request.fixturenames: tmpdir =", "MonkeyPatch m = MonkeyPatch() if request.param == \"pyrepl\": try: import", "% readline, lambda: line) monkeypatch.setattr(\"%s.get_begidx\" % readline, lambda: begidx) monkeypatch.setattr(\"%s.get_endidx\"", "import pyrepl.readline # noqa: F401 except ImportError as exc: pytest.skip(msg=\"pyrepl", "\"\"\"Configure TERM for predictable output from Pygments.\"\"\" from _pytest.monkeypatch import", "return inner @pytest.fixture def monkeypatch_pdb_methods(monkeypatch): def mock(method, *args, **kwargs): print(\"===", "import sys from contextlib import contextmanager import pytest _orig_trace =", "6)) def tmphome(request, monkeypatch): \"\"\"Set up HOME in a temporary", "re-enable coverage tracking. \"\"\" assert sys.gettrace() is _orig_trace orig_settrace =", "= request.getfixturevalue(\"_tmphome_path\") monkeypatch.setenv(\"HOME\", str(tmpdir)) monkeypatch.setenv(\"USERPROFILE\", str(tmpdir)) with tmpdir.as_cwd(): yield tmpdir", "@pytest.fixture def monkeypatch_pdb_methods(monkeypatch): def mock(method, *args, **kwargs): print(\"=== %s(%s, %s)\"", "in a temporary directory. This ignores any real ~/.pdbrc.py then,", "required to re-enable coverage tracking. \"\"\" assert sys.gettrace() is _orig_trace", "(\"set_trace\", \"set_continue\"): monkeypatch.setattr( \"pdb.pdb.Pdb.%s\" % mock_method, functools.partial(mock, mock_method) ) @pytest.fixture", "else: m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", False) return request.param @pytest.fixture def monkeypatch_readline(request, monkeypatch, readline_param):", "def monkeypatch_readline(request, monkeypatch, readline_param): \"\"\"Patch readline to return given results.\"\"\"", "newtrace is None @pytest.fixture(scope=\"session\") def _tmphome_path(tmpdir_factory): return tmpdir_factory.mktemp(\"tmphome\") @pytest.fixture(autouse=sys.version_info <", "(https://travis-ci.org/antocuni/pdb/jobs/509624590)?! 
@pytest.fixture(autouse=True) def restore_settrace(monkeypatch): \"\"\"(Re)store sys.gettrace after test run. This", "restore original tracing function (coverage) # with `sys.settrace(None)`. def settrace(func):", "m: if sys.version_info >= (3,): m.setattr('builtins.__import__', import_mock) else: m.setattr('__builtin__.__import__', import_mock)", "import pytest _orig_trace = None def pytest_configure(): global _orig_trace _orig_trace", "Use tmpdir from testdir, if it is used. if \"testdir\"", ">= (3,): m.setattr('builtins.__import__', import_mock) else: m.setattr('__builtin__.__import__', import_mock) yield m return", "def restore_settrace(monkeypatch): \"\"\"(Re)store sys.gettrace after test run. This is required", "exc: pytest.skip(msg=\"pyrepl not available: {}\".format(exc)) m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", True) else: m.setattr(\"fancycompleter.DefaultConfig.prefer_pyrepl\", False)", "linecache on py27, where it would read contents from ~/.pdbrc?!.", "then, and seems to be required also with linecache on", "yield tmpdir @pytest.fixture(params=(\"pyrepl\", \"readline\"), scope=\"session\") def readline_param(request): from _pytest.monkeypatch import", "_pytest.monkeypatch import MonkeyPatch m = MonkeyPatch() m.setenv(\"TERM\", \"xterm-256color\") yield m", "monkeypatch_readline(request, monkeypatch, readline_param): \"\"\"Patch readline to return given results.\"\"\" def", "readline_param == \"pyrepl\": readline = \"pyrepl.readline\" else: assert readline_param ==", "lambda: endidx) return inner @pytest.fixture def monkeypatch_pdb_methods(monkeypatch): def mock(method, *args,", "**kwargs): print(\"=== %s(%s, %s)\" % (method, args, kwargs)) for mock_method", "readline, lambda: begidx) monkeypatch.setattr(\"%s.get_endidx\" % readline, lambda: endidx) return inner", "if request.param == \"pyrepl\": try: import pyrepl.readline # noqa: F401" ]
[ "raw_width // 2 crop_size = raw_height // crop_zoom, raw_width //", "crop_img(img, relative_corners): \"\"\" relative_corners are floats between 0 and 1", "center = raw_height // 2, raw_width // 2 crop_size =", "def crop_img(img, relative_corners): \"\"\" relative_corners are floats between 0 and", "the full image, [[0.5, 0.5], [1, 1]] would be bottom", "center. \"\"\" raw_height, raw_width = img.shape[:2] center = raw_height //", "raw_height)] bottom_right_pix = [int(rc[1][0] * raw_width), int(rc[1][1] * raw_height)] img_cropped", "min_x:max_x] return img_cropped def crop_img(img, relative_corners): \"\"\" relative_corners are floats", "is amount to \"zoom\" into the image. E.g. 2.0 would", "box should be ([[top_left_x, top_left_y], [bottom_right_x, bottom_right_y]]). e.g. [[0, 0],", "would be bottom right.\"\"\" rc = relative_corners raw_height, raw_width =", "[1, 1]] would be bottom right.\"\"\" rc = relative_corners raw_height,", "= raw_height // crop_zoom, raw_width // crop_zoom min_y, max_y =", "0], [1, 1]] would be the full image, [[0.5, 0.5],", "are floats between 0 and 1 designating where the corners", "min_x, max_x = int(center[1] - crop_size[1] // 2), int(center[1] +", "where the corners of a crop box should be ([[top_left_x,", "crop_zoom): \"\"\" crop_zoom is amount to \"zoom\" into the image.", "int(center[1] + crop_size[1] // 2) img_cropped = img[min_y:max_y, min_x:max_x] return", "designating where the corners of a crop box should be", "E.g. 2.0 would cut out half of the width, half", "raw_height, raw_width = img.shape[:2] center = raw_height // 2, raw_width", "// crop_zoom min_y, max_y = int(center[0] - crop_size[0] // 2),", "// 2), int(center[0] + crop_size[0] // 2) min_x, max_x =", "raw_height, raw_width = img.shape[:2] top_left_pix = [int(rc[0][0] * raw_width), int(rc[0][1]", "bottom_right_pix = [int(rc[1][0] * raw_width), int(rc[1][1] * raw_height)] img_cropped =", "half of the height, and only give the center. 
\"\"\"", "crop_size[1] // 2) img_cropped = img[min_y:max_y, min_x:max_x] return img_cropped def", "amount to \"zoom\" into the image. E.g. 2.0 would cut", "between 0 and 1 designating where the corners of a", "crop_size[1] // 2), int(center[1] + crop_size[1] // 2) img_cropped =", "width, half of the height, and only give the center.", "raw_height // 2, raw_width // 2 crop_size = raw_height //", "[[0.5, 0.5], [1, 1]] would be bottom right.\"\"\" rc =", "[int(rc[0][0] * raw_width), int(rc[0][1] * raw_height)] bottom_right_pix = [int(rc[1][0] *", "center_crop_img(img, crop_zoom): \"\"\" crop_zoom is amount to \"zoom\" into the", "crop_size[0] // 2), int(center[0] + crop_size[0] // 2) min_x, max_x", "1]] would be the full image, [[0.5, 0.5], [1, 1]]", "and only give the center. \"\"\" raw_height, raw_width = img.shape[:2]", "0 and 1 designating where the corners of a crop", "corners of a crop box should be ([[top_left_x, top_left_y], [bottom_right_x,", "crop_size[0] // 2) min_x, max_x = int(center[1] - crop_size[1] //", "to \"zoom\" into the image. E.g. 2.0 would cut out", "* raw_width), int(rc[0][1] * raw_height)] bottom_right_pix = [int(rc[1][0] * raw_width),", "* raw_width), int(rc[1][1] * raw_height)] img_cropped = img[top_left_pix[1]:bottom_right_pix[1], top_left_pix[0]:bottom_right_pix[0]] return", "be bottom right.\"\"\" rc = relative_corners raw_height, raw_width = img.shape[:2]", "2) img_cropped = img[min_y:max_y, min_x:max_x] return img_cropped def crop_img(img, relative_corners):", "* raw_height)] bottom_right_pix = [int(rc[1][0] * raw_width), int(rc[1][1] * raw_height)]", "height, and only give the center. \"\"\" raw_height, raw_width =", "env utilties. 
\"\"\" def center_crop_img(img, crop_zoom): \"\"\" crop_zoom is amount", "min_y, max_y = int(center[0] - crop_size[0] // 2), int(center[0] +", "cut out half of the width, half of the height,", "// 2) img_cropped = img[min_y:max_y, min_x:max_x] return img_cropped def crop_img(img,", "relative_corners): \"\"\" relative_corners are floats between 0 and 1 designating", "img.shape[:2] center = raw_height // 2, raw_width // 2 crop_size", "// 2 crop_size = raw_height // crop_zoom, raw_width // crop_zoom", "rc = relative_corners raw_height, raw_width = img.shape[:2] top_left_pix = [int(rc[0][0]", "int(rc[0][1] * raw_height)] bottom_right_pix = [int(rc[1][0] * raw_width), int(rc[1][1] *", "img.shape[:2] top_left_pix = [int(rc[0][0] * raw_width), int(rc[0][1] * raw_height)] bottom_right_pix", "int(center[1] - crop_size[1] // 2), int(center[1] + crop_size[1] // 2)", "raw_width = img.shape[:2] top_left_pix = [int(rc[0][0] * raw_width), int(rc[0][1] *", "2) min_x, max_x = int(center[1] - crop_size[1] // 2), int(center[1]", "crop_zoom is amount to \"zoom\" into the image. E.g. 2.0", "and 1 designating where the corners of a crop box", "+ crop_size[0] // 2) min_x, max_x = int(center[1] - crop_size[1]", "= [int(rc[0][0] * raw_width), int(rc[0][1] * raw_height)] bottom_right_pix = [int(rc[1][0]", "= img.shape[:2] center = raw_height // 2, raw_width // 2", "\"\"\" Various generic env utilties. \"\"\" def center_crop_img(img, crop_zoom): \"\"\"", "raw_height // crop_zoom, raw_width // crop_zoom min_y, max_y = int(center[0]", "of a crop box should be ([[top_left_x, top_left_y], [bottom_right_x, bottom_right_y]]).", "be the full image, [[0.5, 0.5], [1, 1]] would be", "the center. \"\"\" raw_height, raw_width = img.shape[:2] center = raw_height", "- crop_size[0] // 2), int(center[0] + crop_size[0] // 2) min_x,", "raw_width = img.shape[:2] center = raw_height // 2, raw_width //", "top_left_y], [bottom_right_x, bottom_right_y]]). e.g. [[0, 0], [1, 1]] would be", "bottom_right_y]]). e.g. 
[[0, 0], [1, 1]] would be the full", "= int(center[1] - crop_size[1] // 2), int(center[1] + crop_size[1] //", "+ crop_size[1] // 2) img_cropped = img[min_y:max_y, min_x:max_x] return img_cropped", "image, [[0.5, 0.5], [1, 1]] would be bottom right.\"\"\" rc", "max_x = int(center[1] - crop_size[1] // 2), int(center[1] + crop_size[1]", "\"\"\" crop_zoom is amount to \"zoom\" into the image. E.g.", "[[0, 0], [1, 1]] would be the full image, [[0.5,", "right.\"\"\" rc = relative_corners raw_height, raw_width = img.shape[:2] top_left_pix =", "// 2) min_x, max_x = int(center[1] - crop_size[1] // 2),", "= relative_corners raw_height, raw_width = img.shape[:2] top_left_pix = [int(rc[0][0] *", "crop_zoom, raw_width // crop_zoom min_y, max_y = int(center[0] - crop_size[0]", "int(center[0] + crop_size[0] // 2) min_x, max_x = int(center[1] -", "raw_width // crop_zoom min_y, max_y = int(center[0] - crop_size[0] //", "would cut out half of the width, half of the", "\"\"\" def center_crop_img(img, crop_zoom): \"\"\" crop_zoom is amount to \"zoom\"", "the corners of a crop box should be ([[top_left_x, top_left_y],", "// 2), int(center[1] + crop_size[1] // 2) img_cropped = img[min_y:max_y,", "0.5], [1, 1]] would be bottom right.\"\"\" rc = relative_corners", "top_left_pix = [int(rc[0][0] * raw_width), int(rc[0][1] * raw_height)] bottom_right_pix =", "\"zoom\" into the image. E.g. 2.0 would cut out half", "image. E.g. 2.0 would cut out half of the width,", "of the height, and only give the center. \"\"\" raw_height,", "floats between 0 and 1 designating where the corners of", "utilties. \"\"\" def center_crop_img(img, crop_zoom): \"\"\" crop_zoom is amount to", "a crop box should be ([[top_left_x, top_left_y], [bottom_right_x, bottom_right_y]]). e.g.", "= [int(rc[1][0] * raw_width), int(rc[1][1] * raw_height)] img_cropped = img[top_left_pix[1]:bottom_right_pix[1],", "max_y = int(center[0] - crop_size[0] // 2), int(center[0] + crop_size[0]", "generic env utilties. 
\"\"\" def center_crop_img(img, crop_zoom): \"\"\" crop_zoom is", "raw_width), int(rc[0][1] * raw_height)] bottom_right_pix = [int(rc[1][0] * raw_width), int(rc[1][1]", "// crop_zoom, raw_width // crop_zoom min_y, max_y = int(center[0] -", "2), int(center[0] + crop_size[0] // 2) min_x, max_x = int(center[1]", "should be ([[top_left_x, top_left_y], [bottom_right_x, bottom_right_y]]). e.g. [[0, 0], [1,", "the image. E.g. 2.0 would cut out half of the", "relative_corners are floats between 0 and 1 designating where the", "raw_width), int(rc[1][1] * raw_height)] img_cropped = img[top_left_pix[1]:bottom_right_pix[1], top_left_pix[0]:bottom_right_pix[0]] return img_cropped", "// 2, raw_width // 2 crop_size = raw_height // crop_zoom,", "img_cropped def crop_img(img, relative_corners): \"\"\" relative_corners are floats between 0", "2 crop_size = raw_height // crop_zoom, raw_width // crop_zoom min_y,", "2), int(center[1] + crop_size[1] // 2) img_cropped = img[min_y:max_y, min_x:max_x]", "= img[min_y:max_y, min_x:max_x] return img_cropped def crop_img(img, relative_corners): \"\"\" relative_corners", "Various generic env utilties. \"\"\" def center_crop_img(img, crop_zoom): \"\"\" crop_zoom", "img[min_y:max_y, min_x:max_x] return img_cropped def crop_img(img, relative_corners): \"\"\" relative_corners are", "e.g. [[0, 0], [1, 1]] would be the full image,", "<gh_stars>1-10 \"\"\" Various generic env utilties. \"\"\" def center_crop_img(img, crop_zoom):", "only give the center. \"\"\" raw_height, raw_width = img.shape[:2] center", "of the width, half of the height, and only give", "into the image. E.g. 2.0 would cut out half of", "([[top_left_x, top_left_y], [bottom_right_x, bottom_right_y]]). e.g. 
[[0, 0], [1, 1]] would", "the width, half of the height, and only give the", "out half of the width, half of the height, and", "2, raw_width // 2 crop_size = raw_height // crop_zoom, raw_width", "full image, [[0.5, 0.5], [1, 1]] would be bottom right.\"\"\"", "the height, and only give the center. \"\"\" raw_height, raw_width", "[int(rc[1][0] * raw_width), int(rc[1][1] * raw_height)] img_cropped = img[top_left_pix[1]:bottom_right_pix[1], top_left_pix[0]:bottom_right_pix[0]]", "= raw_height // 2, raw_width // 2 crop_size = raw_height", "return img_cropped def crop_img(img, relative_corners): \"\"\" relative_corners are floats between", "bottom right.\"\"\" rc = relative_corners raw_height, raw_width = img.shape[:2] top_left_pix", "1 designating where the corners of a crop box should", "half of the width, half of the height, and only", "= img.shape[:2] top_left_pix = [int(rc[0][0] * raw_width), int(rc[0][1] * raw_height)]", "1]] would be bottom right.\"\"\" rc = relative_corners raw_height, raw_width", "[1, 1]] would be the full image, [[0.5, 0.5], [1,", "crop_size = raw_height // crop_zoom, raw_width // crop_zoom min_y, max_y", "relative_corners raw_height, raw_width = img.shape[:2] top_left_pix = [int(rc[0][0] * raw_width),", "- crop_size[1] // 2), int(center[1] + crop_size[1] // 2) img_cropped", "img_cropped = img[min_y:max_y, min_x:max_x] return img_cropped def crop_img(img, relative_corners): \"\"\"", "be ([[top_left_x, top_left_y], [bottom_right_x, bottom_right_y]]). e.g. [[0, 0], [1, 1]]", "= int(center[0] - crop_size[0] // 2), int(center[0] + crop_size[0] //", "crop box should be ([[top_left_x, top_left_y], [bottom_right_x, bottom_right_y]]). e.g. [[0,", "2.0 would cut out half of the width, half of", "\"\"\" raw_height, raw_width = img.shape[:2] center = raw_height // 2,", "[bottom_right_x, bottom_right_y]]). e.g. [[0, 0], [1, 1]] would be the", "give the center. 
\"\"\" raw_height, raw_width = img.shape[:2] center =", "crop_zoom min_y, max_y = int(center[0] - crop_size[0] // 2), int(center[0]", "\"\"\" relative_corners are floats between 0 and 1 designating where", "would be the full image, [[0.5, 0.5], [1, 1]] would", "int(center[0] - crop_size[0] // 2), int(center[0] + crop_size[0] // 2)", "def center_crop_img(img, crop_zoom): \"\"\" crop_zoom is amount to \"zoom\" into" ]
[ "def project(self): return mock.Mock() def isValidOrigin(self, origin, inputs): with mock.patch('sentry.utils.http.get_origins')", "get_origins(project) self.assertEquals(result, frozenset(['http://foo.example', 'http://example.com'])) def test_setting_empty(self): with self.settings(SENTRY_ALLOW_ORIGIN=None): result =", "test_project(self): project = Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN=None): result =", "is False def test_custom_protocol_with_domainish_match(self): result = self.isValidOrigin('sp://custom-thing.foobar/foo/bar', ['sp://*.foobar']) assert result", "['*.example.com']) self.assertEquals(result, False) def test_domain_wildcard_matches_domain_with_path(self): result = self.isValidOrigin('http://foo.example.com/foo/bar', ['*.example.com']) self.assertEquals(result,", "def test_full_uri_match(self): result = self.isValidOrigin('http://example.com', ['http://example.com']) self.assertEquals(result, True) def test_full_uri_match_requires_scheme(self):", "result = self.isValidOrigin('http://example.com/foo/bar', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_port(self): result =", "self.assertEquals(result, True) def test_domain_wildcard_does_not_match_others(self): result = self.isValidOrigin('http://foo.com', ['*.example.com']) self.assertEquals(result, False)", "GetOriginsTestCase(TestCase): def test_project_default(self): project = Project.objects.get() with self.settings(SENTRY_ALLOW_ORIGIN=None): result =", "self.isValidOrigin('http://example.com/foo/bar', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com'])", "class IsValidOriginTestCase(TestCase): @fixture def project(self): return mock.Mock() def isValidOrigin(self, origin,", "result = self.isValidOrigin('http://foo.example.com', ['*.example.com']) 
self.assertEquals(result, True) def test_domain_wildcard_matches_subdomain_with_port(self): result =", "True result = self.isValidOrigin('dp://custom-thing/foo/bar', ['sp://']) assert result is False def", "['example.com:80']) assert result is True def test_base_domain_does_not_match_domain_with_invalid_port(self): result = self.isValidOrigin('http://example.com:80',", "assert not self.is_valid_ip('127.0.0.1', ['127.0.0.0/8']) assert not self.is_valid_ip('127.0.0.1', ['0.0.0.0', '127.0.0.0/8', '192.168.1.0/8'])", "self.isValidOrigin('http://example.com', ['*']) self.assertEquals(result, True) def test_domain_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com', ['*.example.com'])", "['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_does_not_match_others(self): result = self.isValidOrigin('http://foo.com', ['*.example.com']) self.assertEquals(result,", "absolute_uri, is_valid_ip, ) class AbsoluteUriTest(TestCase): def test_without_path(self): assert absolute_uri() ==", "def test_without_path(self): assert absolute_uri() == options.get('system.url-prefix') def test_with_path(self): assert absolute_uri('/foo/bar')", "'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_port(self): url1 = 'http://example.com:80/foo/bar' url2 =", "result = self.isValidOrigin('null', ['*']) self.assertEquals(result, True) def test_null_invalid_graceful_with_domains(self): result =", "get_origins.assert_called_once_with(self.project) return result def test_global_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com', ['*']) self.assertEquals(result,", "= 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_port(self): url1 = 'http://example.com:80/foo/bar' url2", "frozenset([])) def test_setting_all(self): with self.settings(SENTRY_ALLOW_ORIGIN='*'): result = get_origins(None) self.assertEquals(result, 
frozenset(['*']))", "def test_domain_wildcard_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_subdomain(self):", "is_valid_ip, ) class AbsoluteUriTest(TestCase): def test_without_path(self): assert absolute_uri() == options.get('system.url-prefix')", "= self.isValidOrigin('http://example.com/foo/bar', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80',", "test_base_domain_does_not_match_subdomain(self): result = self.isValidOrigin('http://example.com', ['foo.example.com']) self.assertEquals(result, False) def test_full_uri_match(self): result", "from sentry.testutils import TestCase from sentry.utils.http import ( is_same_domain, is_valid_origin,", "result = get_origins(project) self.assertEquals(result, frozenset(['*'])) def test_project(self): project = Project.objects.get()", "@fixture def project(self): return mock.Mock() def isValidOrigin(self, origin, inputs): with", "import fixture from sentry import options from sentry.models import Project", "result = self.isValidOrigin('http://foo.com', ['*.example.com']) self.assertEquals(result, False) def test_domain_wildcard_matches_domain_with_path(self): result =", "test_domain_wildcard_does_not_match_others(self): result = self.isValidOrigin('http://foo.com', ['*.example.com']) self.assertEquals(result, False) def test_domain_wildcard_matches_domain_with_path(self): result", "result = self.isValidOrigin('http://example.com', ['http://example.com']) self.assertEquals(result, True) def test_full_uri_match_requires_scheme(self): result =", "self.isValidOrigin('http://example.com', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_path(self): result = self.isValidOrigin('http://example.com/foo/bar', ['example.com'])", "= 
self.isValidOrigin('http://example.com', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_path(self): result = self.isValidOrigin('http://example.com/foo/bar',", "def test_base_domain_does_not_match_subdomain(self): result = self.isValidOrigin('http://example.com', ['foo.example.com']) self.assertEquals(result, False) def test_full_uri_match(self):", "is True result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.xn--lcalhost-54a']) assert result is True", "test_project_default(self): project = Project.objects.get() with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project) self.assertEquals(result,", "[u'*.xn--lcalhost-54a']) assert result is True result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.xn--lcalhost-54a']) assert", "True result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.xn--lcalhost-54a']) assert result is True result", "['.']) assert result is False class IsValidIPTestCase(TestCase): def is_valid_ip(self, ip,", "is True def test_punycode(self): result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.l\\xf8calhost']) assert result", "test_unparseable_uri(self): result = self.isValidOrigin('http://example.com', ['.']) assert result is False class", "True result = self.isValidOrigin('sp://custom-thing-two/foo/bar', ['sp://custom-thing']) assert result is False def", "assert result is False class IsValidIPTestCase(TestCase): def is_valid_ip(self, ip, inputs):", "test_domain_wildcard_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_subdomain(self): result", "self.assertEquals(result, True) def test_domain_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com', ['*.example.com']) self.assertEquals(result, True)", "self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_scheme(self): url1 = 'https://example.com/foo/bar' 
url2 = 'http://example.com/biz/baz'", "test_base_domain_matches_domain_with_explicit_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com:80']) assert result is True def", "<gh_stars>1-10 # -*- coding: utf-8 -*- from __future__ import absolute_import", "self.isValidOrigin('sp://custom-thing.bizbaz/foo/bar', ['sp://*.foobar']) assert result is False def test_unicode(self): result =", "import ( is_same_domain, is_valid_origin, get_origins, absolute_uri, is_valid_ip, ) class AbsoluteUriTest(TestCase):", "= self.isValidOrigin('https://example.com', ['http://example.com']) self.assertEquals(result, False) def test_full_uri_match_does_not_require_port(self): result = self.isValidOrigin('http://example.com:80',", "self.is_valid_ip('127.0.0.1', ['0.0.0.0', '192.168.1.1', '10.0.0.0/8']) def test_match_blacklist(self): assert not self.is_valid_ip('127.0.0.1', ['127.0.0.1'])", "= self.isValidOrigin('http://example.com', ['http://example.com']) self.assertEquals(result, True) def test_full_uri_match_requires_scheme(self): result = self.isValidOrigin('https://example.com',", "self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_port(self): url1 = 'http://example.com:80/foo/bar' url2 = 'http://example.com:13/biz/baz'", "assert result is True result = self.isValidOrigin('http://xn--lcalhost-54a:80', [u'l\\xf8calhost:80']) assert result", "def test_setting_all(self): with self.settings(SENTRY_ALLOW_ORIGIN='*'): result = get_origins(None) self.assertEquals(result, frozenset(['*'])) def", "['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80', ['*.example.com']) self.assertEquals(result,", "True result = self.isValidOrigin('http://xn--lcalhost-54a:80', [u'l\\xf8calhost:80']) assert result is True def", "result = self.isValidOrigin('http://example.com', ['*.example.com']) self.assertEquals(result, True) def 
test_domain_wildcard_matches_domain_with_port(self): result =", "self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://custom-thing']) assert result is True result = self.isValidOrigin('sp://custom-thing-two/foo/bar', ['sp://custom-thing'])", "self.isValidOrigin('http://example.com', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80', ['*.example.com'])", "self.project.update_option('sentry:blacklisted_ips', inputs) return is_valid_ip(ip, self.project) def test_not_in_blacklist(self): assert self.is_valid_ip('127.0.0.1', [])", "sentry.utils.http import ( is_same_domain, is_valid_origin, get_origins, absolute_uri, is_valid_ip, ) class", "with self.settings(SENTRY_ALLOW_ORIGIN='*'): result = get_origins(None) self.assertEquals(result, frozenset(['*'])) def test_setting_uri(self): with", "__future__ import absolute_import import mock from exam import fixture from", "get_origins(None) self.assertEquals(result, frozenset([])) def test_setting_all(self): with self.settings(SENTRY_ALLOW_ORIGIN='*'): result = get_origins(None)", "= get_origins(None) self.assertEquals(result, frozenset(['http://example.com'])) class IsValidOriginTestCase(TestCase): @fixture def project(self): return", "options from sentry.models import Project from sentry.testutils import TestCase from", "self.assertEquals(result, False) def test_full_uri_match(self): result = self.isValidOrigin('http://example.com', ['http://example.com']) self.assertEquals(result, True)", "'http://example.com:80/foo/bar' url2 = 'http://example.com:13/biz/baz' self.assertFalse(is_same_domain(url1, url2)) class GetOriginsTestCase(TestCase): def test_project_default(self):", "self.assertEquals(result, True) def test_base_domain_matches_domain(self): result = self.isValidOrigin('http://example.com', ['example.com']) self.assertEquals(result, True)", "is_valid_ip(self, ip, inputs): 
self.project.update_option('sentry:blacklisted_ips', inputs) return is_valid_ip(ip, self.project) def test_not_in_blacklist(self):", "def test_base_domain_matches_domain(self): result = self.isValidOrigin('http://example.com', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_path(self):", "['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_explicit_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com:80']) assert", "'127.0.0.1', '192.168.1.1']) def test_match_blacklist_range(self): assert not self.is_valid_ip('127.0.0.1', ['127.0.0.0/8']) assert not", "False) def test_custom_protocol_with_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://custom-thing']) assert result is", "['127.0.0.1']) assert not self.is_valid_ip('127.0.0.1', ['0.0.0.0', '127.0.0.1', '192.168.1.1']) def test_match_blacklist_range(self): assert", "= self.isValidOrigin('http://example.com:80', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_explicit_port(self): result = self.isValidOrigin('http://example.com:80',", "self.assertEquals(result, True) def test_base_domain_matches_domain_with_path(self): result = self.isValidOrigin('http://example.com/foo/bar', ['example.com']) self.assertEquals(result, True)", "inputs) return is_valid_ip(ip, self.project) def test_not_in_blacklist(self): assert self.is_valid_ip('127.0.0.1', []) assert", "True def test_punycode(self): result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.l\\xf8calhost']) assert result is", "def test_match_blacklist(self): assert not self.is_valid_ip('127.0.0.1', ['127.0.0.1']) assert not self.is_valid_ip('127.0.0.1', ['0.0.0.0',", "test_is_same_domain_diff_port(self): url1 = 'http://example.com:80/foo/bar' url2 = 'http://example.com:13/biz/baz' self.assertFalse(is_same_domain(url1, url2)) class", "def test_domain_wildcard_matches_domain_with_path(self): 
result = self.isValidOrigin('http://foo.example.com/foo/bar', ['*.example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain(self):", "self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project) self.assertEquals(result, frozenset(['*'])) def test_project(self): project =", "result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://custom-thing']) assert result is True result =", "is True result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.xn--lcalhost-54a']) assert result is True", "test_custom_protocol_with_domainish_match(self): result = self.isValidOrigin('sp://custom-thing.foobar/foo/bar', ['sp://*.foobar']) assert result is True result", "def test_project_default(self): project = Project.objects.get() with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project)", "self.assertEquals(result, True) def test_full_uri_match_requires_scheme(self): result = self.isValidOrigin('https://example.com', ['http://example.com']) self.assertEquals(result, False)", "self.isValidOrigin(u'http://l\\xf8calhost', [u'*.xn--lcalhost-54a']) assert result is True result = self.isValidOrigin('http://l\\xc3\\xb8calhost', [u'*.xn--lcalhost-54a'])", "self.is_valid_ip('127.0.0.1', []) assert self.is_valid_ip('127.0.0.1', ['0.0.0.0', '192.168.1.1', '10.0.0.0/8']) def test_match_blacklist(self): assert", "url1 = 'http://example.com/foo/bar' url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_scheme(self):", "is_valid_ip(ip, self.project) def test_not_in_blacklist(self): assert self.is_valid_ip('127.0.0.1', []) assert self.is_valid_ip('127.0.0.1', ['0.0.0.0',", "assert not self.is_valid_ip('127.0.0.1', ['0.0.0.0', '127.0.0.1', '192.168.1.1']) def test_match_blacklist_range(self): assert not", "url2)) def test_is_same_domain_diff_port(self): url1 = 'http://example.com:80/foo/bar' url2 = 'http://example.com:13/biz/baz' self.assertFalse(is_same_domain(url1,", "def 
test_full_uri_match_does_not_require_port(self): result = self.isValidOrigin('http://example.com:80', ['http://example.com']) self.assertEquals(result, True) def test_partial_uri_match(self):", "[u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(project) self.assertEquals(result, frozenset(['http://foo.example', 'http://example.com'])) def", "assert result is True result = self.isValidOrigin('http://l\\xc3\\xb8calhost', [u'*.xn--lcalhost-54a']) assert result", "def test_is_same_domain_diff_scheme(self): url1 = 'https://example.com/foo/bar' url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2))", "assert result is False def test_unicode(self): result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.l\\xf8calhost'])", "test_with_path(self): assert absolute_uri('/foo/bar') == '%s/foo/bar' % (options.get('system.url-prefix'),) class SameDomainTestCase(TestCase): def", "def test_setting_empty(self): with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(None) self.assertEquals(result, frozenset([])) def", "assert result is False def test_custom_protocol_with_domainish_match(self): result = self.isValidOrigin('sp://custom-thing.foobar/foo/bar', ['sp://*.foobar'])", "test_match_blacklist(self): assert not self.is_valid_ip('127.0.0.1', ['127.0.0.1']) assert not self.is_valid_ip('127.0.0.1', ['0.0.0.0', '127.0.0.1',", "= self.isValidOrigin('sp://custom-thing.bizbaz/foo/bar', ['sp://*.foobar']) assert result is False def test_unicode(self): result", "= self.isValidOrigin('http://example.com:80', ['example.com:80']) assert result is True def test_base_domain_does_not_match_domain_with_invalid_port(self): result", "assert not self.is_valid_ip('127.0.0.1', ['127.0.0.1']) assert not self.is_valid_ip('127.0.0.1', ['0.0.0.0', '127.0.0.1', '192.168.1.1'])", "mock.Mock() def isValidOrigin(self, origin, inputs): with mock.patch('sentry.utils.http.get_origins') as get_origins: 
get_origins.return_value", "result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://*']) assert result is True result =", "with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(None) self.assertEquals(result, frozenset(['http://example.com'])) class IsValidOriginTestCase(TestCase): @fixture", "self.isValidOrigin('http://xn--lcalhost-54a', [u'*.xn--lcalhost-54a']) assert result is True result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.xn--lcalhost-54a'])", "= get_origins(project) self.assertEquals(result, frozenset(['*'])) def test_project(self): project = Project.objects.get() project.update_option('sentry:origins',", "Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project) self.assertEquals(result, frozenset(['http://foo.example']))", "[]) assert self.is_valid_ip('127.0.0.1', ['0.0.0.0', '192.168.1.1', '10.0.0.0/8']) def test_match_blacklist(self): assert not", "assert result is True def test_unparseable_uri(self): result = self.isValidOrigin('http://example.com', ['.'])", "result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.xn--lcalhost-54a']) assert result is True result =", "url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_port(self): url1 = 'http://example.com:80/foo/bar'", "= 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_scheme(self): url1 = 'https://example.com/foo/bar' url2", "def is_valid_ip(self, ip, inputs): self.project.update_option('sentry:blacklisted_ips', inputs) return is_valid_ip(ip, self.project) def", "not self.is_valid_ip('127.0.0.1', ['0.0.0.0', '127.0.0.1', '192.168.1.1']) def test_match_blacklist_range(self): assert not self.is_valid_ip('127.0.0.1',", "self.assertFalse(is_same_domain(url1, url2)) class GetOriginsTestCase(TestCase): def test_project_default(self): project 
= Project.objects.get() with", "= self.isValidOrigin('null', ['http://example.com']) self.assertEquals(result, False) def test_custom_protocol_with_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar',", "% (options.get('system.url-prefix'),) class SameDomainTestCase(TestCase): def test_is_same_domain(self): url1 = 'http://example.com/foo/bar' url2", "result is True result = self.isValidOrigin('sp://custom-thing.bizbaz/foo/bar', ['sp://*.foobar']) assert result is", "import Project from sentry.testutils import TestCase from sentry.utils.http import (", "self.project) get_origins.assert_called_once_with(self.project) return result def test_global_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com', ['*'])", "['sp://*']) assert result is True result = self.isValidOrigin('dp://custom-thing/foo/bar', ['sp://']) assert", "is_valid_origin, get_origins, absolute_uri, is_valid_ip, ) class AbsoluteUriTest(TestCase): def test_without_path(self): assert", "test_punycode(self): result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.l\\xf8calhost']) assert result is True result", "= 'https://example.com/foo/bar' url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_port(self): url1", "# -*- coding: utf-8 -*- from __future__ import absolute_import import", "ip, inputs): self.project.update_option('sentry:blacklisted_ips', inputs) return is_valid_ip(ip, self.project) def test_not_in_blacklist(self): assert", "self.assertEquals(result, True) def test_null_invalid_graceful_with_domains(self): result = self.isValidOrigin('null', ['http://example.com']) self.assertEquals(result, False)", "= self.isValidOrigin('http://example.com', ['.']) assert result is False class IsValidIPTestCase(TestCase): def", "test_null_invalid_graceful_with_domains(self): result = self.isValidOrigin('null', ['http://example.com']) self.assertEquals(result, False) def 
test_custom_protocol_with_location(self): result", "= self.isValidOrigin('sp://custom-thing.foobar/foo/bar', ['sp://*.foobar']) assert result is True result = self.isValidOrigin('sp://custom-thing.bizbaz/foo/bar',", "['foo.example.com']) self.assertEquals(result, False) def test_full_uri_match(self): result = self.isValidOrigin('http://example.com', ['http://example.com']) self.assertEquals(result,", "test_custom_protocol_without_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://*']) assert result is True result", "get_origins: get_origins.return_value = inputs result = is_valid_origin(origin, self.project) get_origins.assert_called_once_with(self.project) return", "IsValidOriginTestCase(TestCase): @fixture def project(self): return mock.Mock() def isValidOrigin(self, origin, inputs):", "import mock from exam import fixture from sentry import options", "is_valid_origin(origin, self.project) get_origins.assert_called_once_with(self.project) return result def test_global_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com',", "= inputs result = is_valid_origin(origin, self.project) get_origins.assert_called_once_with(self.project) return result def", "origin, inputs): with mock.patch('sentry.utils.http.get_origins') as get_origins: get_origins.return_value = inputs result", "= self.isValidOrigin('http://example.com:80', ['example.com:443']) assert result is False def test_base_domain_does_not_match_subdomain(self): result", "from sentry.utils.http import ( is_same_domain, is_valid_origin, get_origins, absolute_uri, is_valid_ip, )", "self.assertEquals(result, True) def test_partial_uri_match(self): result = self.isValidOrigin('http://example.com/foo/bar', ['http://example.com']) self.assertEquals(result, True)", "['*']) self.assertEquals(result, True) def test_null_invalid_graceful_with_domains(self): result = self.isValidOrigin('null', ['http://example.com']) self.assertEquals(result,", "= 
self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://*']) assert result is True result = self.isValidOrigin('dp://custom-thing/foo/bar',", "Project from sentry.testutils import TestCase from sentry.utils.http import ( is_same_domain,", "import TestCase from sentry.utils.http import ( is_same_domain, is_valid_origin, get_origins, absolute_uri,", "[u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project) self.assertEquals(result, frozenset(['http://foo.example'])) def test_project_and_setting(self):", "= self.isValidOrigin(u'http://l\\xf8calhost', [u'*.xn--lcalhost-54a']) assert result is True result = self.isValidOrigin('http://l\\xc3\\xb8calhost',", "def test_null_valid_with_global(self): result = self.isValidOrigin('null', ['*']) self.assertEquals(result, True) def test_null_invalid_graceful_with_domains(self):", "options.get('system.url-prefix') def test_with_path(self): assert absolute_uri('/foo/bar') == '%s/foo/bar' % (options.get('system.url-prefix'),) class", "inputs): with mock.patch('sentry.utils.http.get_origins') as get_origins: get_origins.return_value = inputs result =", "self.assertEquals(result, frozenset(['http://example.com'])) class IsValidOriginTestCase(TestCase): @fixture def project(self): return mock.Mock() def", "self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(None) self.assertEquals(result, frozenset([])) def test_setting_all(self): with self.settings(SENTRY_ALLOW_ORIGIN='*'):", "project = Project.objects.get() with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project) self.assertEquals(result, frozenset(['*']))", "frozenset(['*'])) def test_setting_uri(self): with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(None) self.assertEquals(result, frozenset(['http://example.com']))", "result = self.isValidOrigin('http://example.com:80', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_subdomain(self): result 
=", "result is True result = self.isValidOrigin('dp://custom-thing/foo/bar', ['sp://']) assert result is", "-*- from __future__ import absolute_import import mock from exam import", "['sp://custom-thing']) assert result is False def test_custom_protocol_without_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar',", "= self.isValidOrigin('http://l\\xc3\\xb8calhost', [u'*.xn--lcalhost-54a']) assert result is True result = self.isValidOrigin('http://xn--lcalhost-54a',", "frozenset(['*'])) def test_project(self): project = Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN=None):", "False def test_unicode(self): result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.l\\xf8calhost']) assert result is", "from exam import fixture from sentry import options from sentry.models", "frozenset(['http://foo.example'])) def test_project_and_setting(self): project = Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'):", "as get_origins: get_origins.return_value = inputs result = is_valid_origin(origin, self.project) get_origins.assert_called_once_with(self.project)", "def isValidOrigin(self, origin, inputs): with mock.patch('sentry.utils.http.get_origins') as get_origins: get_origins.return_value =", "assert result is True result = self.isValidOrigin('dp://custom-thing/foo/bar', ['sp://']) assert result", "url2 = 'http://example.com:13/biz/baz' self.assertFalse(is_same_domain(url1, url2)) class GetOriginsTestCase(TestCase): def test_project_default(self): project", "class AbsoluteUriTest(TestCase): def test_without_path(self): assert absolute_uri() == options.get('system.url-prefix') def test_with_path(self):", "False def test_custom_protocol_without_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://*']) assert result is", "project = Project.objects.get() 
project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project)", "test_full_uri_match(self): result = self.isValidOrigin('http://example.com', ['http://example.com']) self.assertEquals(result, True) def test_full_uri_match_requires_scheme(self): result", "result = self.isValidOrigin('http://example.com', ['.']) assert result is False class IsValidIPTestCase(TestCase):", "return is_valid_ip(ip, self.project) def test_not_in_blacklist(self): assert self.is_valid_ip('127.0.0.1', []) assert self.is_valid_ip('127.0.0.1',", "url1 = 'https://example.com/foo/bar' url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_port(self):", "False) def test_full_uri_match(self): result = self.isValidOrigin('http://example.com', ['http://example.com']) self.assertEquals(result, True) def", "test_domain_wildcard_matches_domain_with_path(self): result = self.isValidOrigin('http://foo.example.com/foo/bar', ['*.example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain(self): result", "result def test_global_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com', ['*']) self.assertEquals(result, True) def", "[u'l\\xf8calhost']) assert result is True result = self.isValidOrigin('http://xn--lcalhost-54a:80', [u'l\\xf8calhost:80']) assert", "result is True def test_base_domain_does_not_match_domain_with_invalid_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com:443']) assert", "test_project_and_setting(self): project = Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result =", "assert self.is_valid_ip('127.0.0.1', []) assert self.is_valid_ip('127.0.0.1', ['0.0.0.0', '192.168.1.1', '10.0.0.0/8']) def test_match_blacklist(self):", ") class AbsoluteUriTest(TestCase): def 
test_without_path(self): assert absolute_uri() == options.get('system.url-prefix') def", "'http://example.com/foo/bar' url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_scheme(self): url1 =", "with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project) self.assertEquals(result, frozenset(['http://foo.example'])) def test_project_and_setting(self): project", "SameDomainTestCase(TestCase): def test_is_same_domain(self): url1 = 'http://example.com/foo/bar' url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1,", "test_base_domain_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_explicit_port(self): result", "self.isValidOrigin('sp://custom-thing.foobar/foo/bar', ['sp://*.foobar']) assert result is True result = self.isValidOrigin('sp://custom-thing.bizbaz/foo/bar', ['sp://*.foobar'])", "result is True result = self.isValidOrigin('http://xn--lcalhost-54a:80', [u'l\\xf8calhost:80']) assert result is", "True def test_base_domain_does_not_match_domain_with_invalid_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com:443']) assert result is", "self.is_valid_ip('127.0.0.1', ['127.0.0.1']) assert not self.is_valid_ip('127.0.0.1', ['0.0.0.0', '127.0.0.1', '192.168.1.1']) def test_match_blacklist_range(self):", "self.assertEquals(result, frozenset(['http://foo.example'])) def test_project_and_setting(self): project = Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example']) with", "result = self.isValidOrigin('http://example.com/foo/bar', ['http://example.com']) self.assertEquals(result, True) def test_null_valid_with_global(self): result =", "= 'http://example.com/foo/bar' url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_scheme(self): url1", 
"test_not_in_blacklist(self): assert self.is_valid_ip('127.0.0.1', []) assert self.is_valid_ip('127.0.0.1', ['0.0.0.0', '192.168.1.1', '10.0.0.0/8']) def", "test_partial_uri_match(self): result = self.isValidOrigin('http://example.com/foo/bar', ['http://example.com']) self.assertEquals(result, True) def test_null_valid_with_global(self): result", "test_domain_wildcard_matches_subdomain_with_port(self): result = self.isValidOrigin('http://foo.example.com:80', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_does_not_match_others(self): result", "test_domain_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_domain_with_port(self): result", "test_full_uri_match_does_not_require_port(self): result = self.isValidOrigin('http://example.com:80', ['http://example.com']) self.assertEquals(result, True) def test_partial_uri_match(self): result", "get_origins, absolute_uri, is_valid_ip, ) class AbsoluteUriTest(TestCase): def test_without_path(self): assert absolute_uri()", "def test_full_uri_match_requires_scheme(self): result = self.isValidOrigin('https://example.com', ['http://example.com']) self.assertEquals(result, False) def test_full_uri_match_does_not_require_port(self):", "assert result is True result = self.isValidOrigin('sp://custom-thing.bizbaz/foo/bar', ['sp://*.foobar']) assert result", "'10.0.0.0/8']) def test_match_blacklist(self): assert not self.is_valid_ip('127.0.0.1', ['127.0.0.1']) assert not self.is_valid_ip('127.0.0.1',", "['sp://*.foobar']) assert result is False def test_unicode(self): result = self.isValidOrigin(u'http://l\\xf8calhost',", "test_base_domain_does_not_match_domain_with_invalid_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com:443']) assert result is False def", "result is False def test_base_domain_does_not_match_subdomain(self): result = 
self.isValidOrigin('http://example.com', ['foo.example.com']) self.assertEquals(result,", "def test_domain_wildcard_matches_subdomain(self): result = self.isValidOrigin('http://foo.example.com', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_subdomain_with_port(self):", "'https://example.com/foo/bar' url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_port(self): url1 =", "def test_domain_wildcard_matches_subdomain_with_port(self): result = self.isValidOrigin('http://foo.example.com:80', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_does_not_match_others(self):", "self.isValidOrigin('http://example.com', ['http://example.com']) self.assertEquals(result, True) def test_full_uri_match_requires_scheme(self): result = self.isValidOrigin('https://example.com', ['http://example.com'])", "self.isValidOrigin(u'http://l\\xf8calhost', [u'*.l\\xf8calhost']) assert result is True def test_punycode(self): result =", "True def test_unparseable_uri(self): result = self.isValidOrigin('http://example.com', ['.']) assert result is", "True) def test_full_uri_match_requires_scheme(self): result = self.isValidOrigin('https://example.com', ['http://example.com']) self.assertEquals(result, False) def", "test_is_same_domain_diff_scheme(self): url1 = 'https://example.com/foo/bar' url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2)) def", "def test_partial_uri_match(self): result = self.isValidOrigin('http://example.com/foo/bar', ['http://example.com']) self.assertEquals(result, True) def test_null_valid_with_global(self):", "= get_origins(project) self.assertEquals(result, frozenset(['http://foo.example'])) def test_project_and_setting(self): project = Project.objects.get() project.update_option('sentry:origins',", "from sentry.models import Project from sentry.testutils import TestCase from sentry.utils.http", "sentry.testutils import 
TestCase from sentry.utils.http import ( is_same_domain, is_valid_origin, get_origins,", "def test_base_domain_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_explicit_port(self):", "Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(project) self.assertEquals(result, frozenset(['http://foo.example',", "result = get_origins(None) self.assertEquals(result, frozenset(['http://example.com'])) class IsValidOriginTestCase(TestCase): @fixture def project(self):", "False def test_base_domain_does_not_match_subdomain(self): result = self.isValidOrigin('http://example.com', ['foo.example.com']) self.assertEquals(result, False) def", "def test_custom_protocol_without_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://*']) assert result is True", "def test_punycode(self): result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.l\\xf8calhost']) assert result is True", "self.assertEquals(result, True) def test_base_domain_matches_domain_with_explicit_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com:80']) assert result", "self.assertEquals(result, False) def test_domain_wildcard_matches_domain_with_path(self): result = self.isValidOrigin('http://foo.example.com/foo/bar', ['*.example.com']) self.assertEquals(result, True)", "self.isValidOrigin('http://example.com', ['.']) assert result is False class IsValidIPTestCase(TestCase): def is_valid_ip(self,", "url2)) def test_is_same_domain_diff_scheme(self): url1 = 'https://example.com/foo/bar' url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1,", "self.assertEquals(result, frozenset([])) def test_setting_all(self): with self.settings(SENTRY_ALLOW_ORIGIN='*'): result = get_origins(None) 
self.assertEquals(result,", "= get_origins(project) self.assertEquals(result, frozenset(['http://foo.example', 'http://example.com'])) def test_setting_empty(self): with self.settings(SENTRY_ALLOW_ORIGIN=None): result", "self.assertEquals(result, False) def test_custom_protocol_with_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://custom-thing']) assert result", "absolute_uri('/foo/bar') == '%s/foo/bar' % (options.get('system.url-prefix'),) class SameDomainTestCase(TestCase): def test_is_same_domain(self): url1", "test_unicode(self): result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.l\\xf8calhost']) assert result is True def", "frozenset(['http://foo.example', 'http://example.com'])) def test_setting_empty(self): with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(None) self.assertEquals(result,", "self.isValidOrigin('http://xn--lcalhost-54a:80', [u'l\\xf8calhost:80']) assert result is True def test_unparseable_uri(self): result =", "self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://*']) assert result is True result = self.isValidOrigin('dp://custom-thing/foo/bar', ['sp://'])", "= 'http://example.com:13/biz/baz' self.assertFalse(is_same_domain(url1, url2)) class GetOriginsTestCase(TestCase): def test_project_default(self): project =", "['*.example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain(self): result = self.isValidOrigin('http://example.com', ['example.com']) self.assertEquals(result,", "True) def test_base_domain_matches_domain_with_path(self): result = self.isValidOrigin('http://example.com/foo/bar', ['example.com']) self.assertEquals(result, True) def", "url1 = 'http://example.com:80/foo/bar' url2 = 'http://example.com:13/biz/baz' self.assertFalse(is_same_domain(url1, url2)) class GetOriginsTestCase(TestCase):", "self.settings(SENTRY_ALLOW_ORIGIN='*'): result = get_origins(None) self.assertEquals(result, frozenset(['*'])) def test_setting_uri(self): with 
self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'):", "False def test_custom_protocol_with_domainish_match(self): result = self.isValidOrigin('sp://custom-thing.foobar/foo/bar', ['sp://*.foobar']) assert result is", "get_origins(None) self.assertEquals(result, frozenset(['*'])) def test_setting_uri(self): with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(None)", "with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(project) self.assertEquals(result, frozenset(['http://foo.example', 'http://example.com'])) def test_setting_empty(self):", "result = self.isValidOrigin('http://example.com:80', ['http://example.com']) self.assertEquals(result, True) def test_partial_uri_match(self): result =", "result = self.isValidOrigin('http://xn--lcalhost-54a', [u'l\\xf8calhost']) assert result is True result =", "sentry.models import Project from sentry.testutils import TestCase from sentry.utils.http import", "get_origins(project) self.assertEquals(result, frozenset(['http://foo.example'])) def test_project_and_setting(self): project = Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example'])", "get_origins(None) self.assertEquals(result, frozenset(['http://example.com'])) class IsValidOriginTestCase(TestCase): @fixture def project(self): return mock.Mock()", "with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project) self.assertEquals(result, frozenset(['*'])) def test_project(self): project", "return mock.Mock() def isValidOrigin(self, origin, inputs): with mock.patch('sentry.utils.http.get_origins') as get_origins:", "absolute_uri() == options.get('system.url-prefix') def test_with_path(self): assert absolute_uri('/foo/bar') == '%s/foo/bar' %", "= self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://custom-thing']) assert result is True result = self.isValidOrigin('sp://custom-thing-two/foo/bar',", "result = self.isValidOrigin('dp://custom-thing/foo/bar', 
['sp://']) assert result is False def test_custom_protocol_with_domainish_match(self):", "= self.isValidOrigin('http://example.com/foo/bar', ['http://example.com']) self.assertEquals(result, True) def test_null_valid_with_global(self): result = self.isValidOrigin('null',", "is_same_domain, is_valid_origin, get_origins, absolute_uri, is_valid_ip, ) class AbsoluteUriTest(TestCase): def test_without_path(self):", "test_domain_wildcard_matches_subdomain(self): result = self.isValidOrigin('http://foo.example.com', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_subdomain_with_port(self): result", "self.isValidOrigin('http://example.com:80', ['example.com:80']) assert result is True def test_base_domain_does_not_match_domain_with_invalid_port(self): result =", "= self.isValidOrigin('http://xn--lcalhost-54a', [u'*.xn--lcalhost-54a']) assert result is True result = self.isValidOrigin(u'http://l\\xf8calhost',", "test_custom_protocol_with_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://custom-thing']) assert result is True result", "result is False def test_custom_protocol_without_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://*']) assert", "( is_same_domain, is_valid_origin, get_origins, absolute_uri, is_valid_ip, ) class AbsoluteUriTest(TestCase): def", "['http://example.com']) self.assertEquals(result, True) def test_full_uri_match_requires_scheme(self): result = self.isValidOrigin('https://example.com', ['http://example.com']) self.assertEquals(result,", "def test_is_same_domain(self): url1 = 'http://example.com/foo/bar' url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2))", "sentry import options from sentry.models import Project from sentry.testutils import", "'http://example.com:13/biz/baz' self.assertFalse(is_same_domain(url1, url2)) class GetOriginsTestCase(TestCase): def test_project_default(self): project = Project.objects.get()", 
"self.isValidOrigin('http://example.com:80', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_subdomain(self): result = self.isValidOrigin('http://foo.example.com', ['*.example.com'])", "def test_setting_uri(self): with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(None) self.assertEquals(result, frozenset(['http://example.com'])) class", "assert result is True result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.xn--lcalhost-54a']) assert result", "self.isValidOrigin('http://xn--lcalhost-54a', [u'l\\xf8calhost']) assert result is True result = self.isValidOrigin('http://xn--lcalhost-54a:80', [u'l\\xf8calhost:80'])", "def test_custom_protocol_with_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://custom-thing']) assert result is True", "result = get_origins(None) self.assertEquals(result, frozenset([])) def test_setting_all(self): with self.settings(SENTRY_ALLOW_ORIGIN='*'): result", "self.assertEquals(result, frozenset(['*'])) def test_setting_uri(self): with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(None) self.assertEquals(result,", "True) def test_domain_wildcard_does_not_match_others(self): result = self.isValidOrigin('http://foo.com', ['*.example.com']) self.assertEquals(result, False) def", "result = self.isValidOrigin('sp://custom-thing.bizbaz/foo/bar', ['sp://*.foobar']) assert result is False def test_unicode(self):", "result = self.isValidOrigin('sp://custom-thing.foobar/foo/bar', ['sp://*.foobar']) assert result is True result =", "assert result is True def test_base_domain_does_not_match_domain_with_invalid_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com:443'])", "self.is_valid_ip('127.0.0.1', ['0.0.0.0', '127.0.0.1', '192.168.1.1']) def test_match_blacklist_range(self): assert not self.is_valid_ip('127.0.0.1', ['127.0.0.0/8'])", "test_null_valid_with_global(self): result = 
self.isValidOrigin('null', ['*']) self.assertEquals(result, True) def test_null_invalid_graceful_with_domains(self): result", "is True result = self.isValidOrigin('http://l\\xc3\\xb8calhost', [u'*.xn--lcalhost-54a']) assert result is True", "def test_domain_wildcard_does_not_match_others(self): result = self.isValidOrigin('http://foo.com', ['*.example.com']) self.assertEquals(result, False) def test_domain_wildcard_matches_domain_with_path(self):", "True) def test_base_domain_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com']) self.assertEquals(result, True) def", "is True result = self.isValidOrigin('sp://custom-thing.bizbaz/foo/bar', ['sp://*.foobar']) assert result is False", "def test_domain_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_domain_with_port(self):", "True result = self.isValidOrigin('http://l\\xc3\\xb8calhost', [u'*.xn--lcalhost-54a']) assert result is True result", "result = self.isValidOrigin('http://example.com:80', ['example.com:443']) assert result is False def test_base_domain_does_not_match_subdomain(self):", "self.isValidOrigin('dp://custom-thing/foo/bar', ['sp://']) assert result is False def test_custom_protocol_with_domainish_match(self): result =", "= self.isValidOrigin('http://xn--lcalhost-54a', [u'l\\xf8calhost']) assert result is True result = self.isValidOrigin('http://xn--lcalhost-54a:80',", "self.isValidOrigin('http://example.com:80', ['http://example.com']) self.assertEquals(result, True) def test_partial_uri_match(self): result = self.isValidOrigin('http://example.com/foo/bar', ['http://example.com'])", "= self.isValidOrigin('http://foo.example.com:80', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_does_not_match_others(self): result = self.isValidOrigin('http://foo.com',", "True result = self.isValidOrigin('http://xn--lcalhost-54a', 
[u'*.xn--lcalhost-54a']) assert result is True result", "mock from exam import fixture from sentry import options from", "True) def test_null_invalid_graceful_with_domains(self): result = self.isValidOrigin('null', ['http://example.com']) self.assertEquals(result, False) def", "def test_not_in_blacklist(self): assert self.is_valid_ip('127.0.0.1', []) assert self.is_valid_ip('127.0.0.1', ['0.0.0.0', '192.168.1.1', '10.0.0.0/8'])", "['sp://custom-thing']) assert result is True result = self.isValidOrigin('sp://custom-thing-two/foo/bar', ['sp://custom-thing']) assert", "[u'*.xn--lcalhost-54a']) assert result is True result = self.isValidOrigin('http://xn--lcalhost-54a', [u'l\\xf8calhost']) assert", "with mock.patch('sentry.utils.http.get_origins') as get_origins: get_origins.return_value = inputs result = is_valid_origin(origin,", "= 'http://example.com:80/foo/bar' url2 = 'http://example.com:13/biz/baz' self.assertFalse(is_same_domain(url1, url2)) class GetOriginsTestCase(TestCase): def", "absolute_import import mock from exam import fixture from sentry import", "True) def test_base_domain_matches_domain_with_explicit_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com:80']) assert result is", "is True result = self.isValidOrigin('sp://custom-thing-two/foo/bar', ['sp://custom-thing']) assert result is False", "get_origins.return_value = inputs result = is_valid_origin(origin, self.project) get_origins.assert_called_once_with(self.project) return result", "def test_unicode(self): result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.l\\xf8calhost']) assert result is True", "utf-8 -*- from __future__ import absolute_import import mock from exam", "test_full_uri_match_requires_scheme(self): result = self.isValidOrigin('https://example.com', ['http://example.com']) self.assertEquals(result, False) def test_full_uri_match_does_not_require_port(self): result", "False) def test_full_uri_match_does_not_require_port(self): result = 
self.isValidOrigin('http://example.com:80', ['http://example.com']) self.assertEquals(result, True) def", "result = self.isValidOrigin('null', ['http://example.com']) self.assertEquals(result, False) def test_custom_protocol_with_location(self): result =", "True) def test_domain_wildcard_matches_subdomain_with_port(self): result = self.isValidOrigin('http://foo.example.com:80', ['*.example.com']) self.assertEquals(result, True) def", "result is True result = self.isValidOrigin('http://xn--lcalhost-54a', [u'l\\xf8calhost']) assert result is", "project = Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(project)", "result is False def test_unicode(self): result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.l\\xf8calhost']) assert", "get_origins(project) self.assertEquals(result, frozenset(['*'])) def test_project(self): project = Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example'])", "url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_scheme(self): url1 = 'https://example.com/foo/bar'", "assert result is True def test_punycode(self): result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.l\\xf8calhost'])", "self.assertEquals(result, frozenset(['http://foo.example', 'http://example.com'])) def test_setting_empty(self): with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(None)", "def test_base_domain_matches_domain_with_path(self): result = self.isValidOrigin('http://example.com/foo/bar', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_port(self):", "result = get_origins(project) self.assertEquals(result, frozenset(['http://foo.example', 'http://example.com'])) def test_setting_empty(self): with self.settings(SENTRY_ALLOW_ORIGIN=None):", "result = 
self.isValidOrigin('sp://custom-thing-two/foo/bar', ['sp://custom-thing']) assert result is False def test_custom_protocol_without_location(self):", "is False class IsValidIPTestCase(TestCase): def is_valid_ip(self, ip, inputs): self.project.update_option('sentry:blacklisted_ips', inputs)", "TestCase from sentry.utils.http import ( is_same_domain, is_valid_origin, get_origins, absolute_uri, is_valid_ip,", "is True result = self.isValidOrigin('http://xn--lcalhost-54a', [u'l\\xf8calhost']) assert result is True", "with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(None) self.assertEquals(result, frozenset([])) def test_setting_all(self): with", "True result = self.isValidOrigin('sp://custom-thing.bizbaz/foo/bar', ['sp://*.foobar']) assert result is False def", "def test_with_path(self): assert absolute_uri('/foo/bar') == '%s/foo/bar' % (options.get('system.url-prefix'),) class SameDomainTestCase(TestCase):", "mock.patch('sentry.utils.http.get_origins') as get_origins: get_origins.return_value = inputs result = is_valid_origin(origin, self.project)", "def test_is_same_domain_diff_port(self): url1 = 'http://example.com:80/foo/bar' url2 = 'http://example.com:13/biz/baz' self.assertFalse(is_same_domain(url1, url2))", "'192.168.1.1']) def test_match_blacklist_range(self): assert not self.is_valid_ip('127.0.0.1', ['127.0.0.0/8']) assert not self.is_valid_ip('127.0.0.1',", "IsValidIPTestCase(TestCase): def is_valid_ip(self, ip, inputs): self.project.update_option('sentry:blacklisted_ips', inputs) return is_valid_ip(ip, self.project)", "self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(None) self.assertEquals(result, frozenset(['http://example.com'])) class IsValidOriginTestCase(TestCase): @fixture def", "self.isValidOrigin('null', ['*']) self.assertEquals(result, True) def test_null_invalid_graceful_with_domains(self): result = self.isValidOrigin('null', ['http://example.com'])", "assert result is True result = 
self.isValidOrigin('sp://custom-thing-two/foo/bar', ['sp://custom-thing']) assert result", "False class IsValidIPTestCase(TestCase): def is_valid_ip(self, ip, inputs): self.project.update_option('sentry:blacklisted_ips', inputs) return", "= get_origins(None) self.assertEquals(result, frozenset(['*'])) def test_setting_uri(self): with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result =", "self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(project) self.assertEquals(result, frozenset(['http://foo.example', 'http://example.com'])) def test_setting_empty(self): with", "inputs): self.project.update_option('sentry:blacklisted_ips', inputs) return is_valid_ip(ip, self.project) def test_not_in_blacklist(self): assert self.is_valid_ip('127.0.0.1',", "not self.is_valid_ip('127.0.0.1', ['127.0.0.1']) assert not self.is_valid_ip('127.0.0.1', ['0.0.0.0', '127.0.0.1', '192.168.1.1']) def", "['example.com:443']) assert result is False def test_base_domain_does_not_match_subdomain(self): result = self.isValidOrigin('http://example.com',", "assert absolute_uri() == options.get('system.url-prefix') def test_with_path(self): assert absolute_uri('/foo/bar') == '%s/foo/bar'", "is True result = self.isValidOrigin('dp://custom-thing/foo/bar', ['sp://']) assert result is False", "self.isValidOrigin('http://example.com:80', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_explicit_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com:80'])", "project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(project) self.assertEquals(result, frozenset(['http://foo.example', 'http://example.com']))", "result is True result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.xn--lcalhost-54a']) assert result is", "class SameDomainTestCase(TestCase): def test_is_same_domain(self): url1 = 
'http://example.com/foo/bar' url2 = 'http://example.com/biz/baz'", "= self.isValidOrigin('http://example.com', ['foo.example.com']) self.assertEquals(result, False) def test_full_uri_match(self): result = self.isValidOrigin('http://example.com',", "project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project) self.assertEquals(result, frozenset(['http://foo.example'])) def", "result = get_origins(None) self.assertEquals(result, frozenset(['*'])) def test_setting_uri(self): with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result", "test_global_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com', ['*']) self.assertEquals(result, True) def test_domain_wildcard_matches_domain(self): result", "self.isValidOrigin('http://foo.example.com/foo/bar', ['*.example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain(self): result = self.isValidOrigin('http://example.com', ['example.com'])", "= self.isValidOrigin('http://example.com', ['*']) self.assertEquals(result, True) def test_domain_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com',", "result = self.isValidOrigin('http://example.com', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_path(self): result =", "def test_project(self): project = Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN=None): result", "url2)) class GetOriginsTestCase(TestCase): def test_project_default(self): project = Project.objects.get() with self.settings(SENTRY_ALLOW_ORIGIN=None):", "result is True def test_punycode(self): result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.l\\xf8calhost']) assert", "def test_custom_protocol_with_domainish_match(self): result = self.isValidOrigin('sp://custom-thing.foobar/foo/bar', ['sp://*.foobar']) assert result is 
True", "result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.l\\xf8calhost']) assert result is True result =", "== '%s/foo/bar' % (options.get('system.url-prefix'),) class SameDomainTestCase(TestCase): def test_is_same_domain(self): url1 =", "False) def test_domain_wildcard_matches_domain_with_path(self): result = self.isValidOrigin('http://foo.example.com/foo/bar', ['*.example.com']) self.assertEquals(result, True) def", "assert result is True result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.xn--lcalhost-54a']) assert result", "result is True result = self.isValidOrigin('sp://custom-thing-two/foo/bar', ['sp://custom-thing']) assert result is", "result is False def test_custom_protocol_with_domainish_match(self): result = self.isValidOrigin('sp://custom-thing.foobar/foo/bar', ['sp://*.foobar']) assert", "[u'*.l\\xf8calhost']) assert result is True def test_punycode(self): result = self.isValidOrigin('http://xn--lcalhost-54a',", "= Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(project) self.assertEquals(result,", "import options from sentry.models import Project from sentry.testutils import TestCase", "['http://example.com']) self.assertEquals(result, False) def test_full_uri_match_does_not_require_port(self): result = self.isValidOrigin('http://example.com:80', ['http://example.com']) self.assertEquals(result,", "['http://example.com']) self.assertEquals(result, True) def test_partial_uri_match(self): result = self.isValidOrigin('http://example.com/foo/bar', ['http://example.com']) self.assertEquals(result,", "frozenset(['http://example.com'])) class IsValidOriginTestCase(TestCase): @fixture def project(self): return mock.Mock() def isValidOrigin(self,", "True) def test_partial_uri_match(self): result = self.isValidOrigin('http://example.com/foo/bar', ['http://example.com']) self.assertEquals(result, True) def", 
"test_setting_uri(self): with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result = get_origins(None) self.assertEquals(result, frozenset(['http://example.com'])) class IsValidOriginTestCase(TestCase):", "test_setting_all(self): with self.settings(SENTRY_ALLOW_ORIGIN='*'): result = get_origins(None) self.assertEquals(result, frozenset(['*'])) def test_setting_uri(self):", "True) def test_null_valid_with_global(self): result = self.isValidOrigin('null', ['*']) self.assertEquals(result, True) def", "assert result is True result = self.isValidOrigin('http://xn--lcalhost-54a', [u'l\\xf8calhost']) assert result", "def test_global_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com', ['*']) self.assertEquals(result, True) def test_domain_wildcard_matches_domain(self):", "= self.isValidOrigin('http://foo.example.com/foo/bar', ['*.example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain(self): result = self.isValidOrigin('http://example.com',", "self.isValidOrigin('http://example.com:80', ['example.com:443']) assert result is False def test_base_domain_does_not_match_subdomain(self): result =", "assert result is False def test_custom_protocol_without_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://*'])", "['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_subdomain_with_port(self): result = self.isValidOrigin('http://foo.example.com:80', ['*.example.com']) self.assertEquals(result,", "exam import fixture from sentry import options from sentry.models import", "test_match_blacklist_range(self): assert not self.is_valid_ip('127.0.0.1', ['127.0.0.0/8']) assert not self.is_valid_ip('127.0.0.1', ['0.0.0.0', '127.0.0.0/8',", "self.assertEquals(result, True) def test_domain_wildcard_matches_subdomain(self): result = self.isValidOrigin('http://foo.example.com', ['*.example.com']) self.assertEquals(result, True)", "'http://example.com/biz/baz' 
self.assertTrue(is_same_domain(url1, url2)) def test_is_same_domain_diff_scheme(self): url1 = 'https://example.com/foo/bar' url2 =", "self.project) def test_not_in_blacklist(self): assert self.is_valid_ip('127.0.0.1', []) assert self.is_valid_ip('127.0.0.1', ['0.0.0.0', '192.168.1.1',", "= self.isValidOrigin('dp://custom-thing/foo/bar', ['sp://']) assert result is False def test_custom_protocol_with_domainish_match(self): result", "from sentry import options from sentry.models import Project from sentry.testutils", "is True result = self.isValidOrigin('http://xn--lcalhost-54a:80', [u'l\\xf8calhost:80']) assert result is True", "def test_base_domain_matches_domain_with_explicit_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com:80']) assert result is True", "= self.isValidOrigin('sp://custom-thing-two/foo/bar', ['sp://custom-thing']) assert result is False def test_custom_protocol_without_location(self): result", "result = self.isValidOrigin('http://xn--lcalhost-54a:80', [u'l\\xf8calhost:80']) assert result is True def test_unparseable_uri(self):", "= self.isValidOrigin('http://example.com:80', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_subdomain(self): result = self.isValidOrigin('http://foo.example.com',", "['sp://']) assert result is False def test_custom_protocol_with_domainish_match(self): result = self.isValidOrigin('sp://custom-thing.foobar/foo/bar',", "test_base_domain_matches_domain_with_path(self): result = self.isValidOrigin('http://example.com/foo/bar', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_port(self): result", "= self.isValidOrigin('http://foo.example.com', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_subdomain_with_port(self): result = self.isValidOrigin('http://foo.example.com:80',", "from __future__ import absolute_import import mock from exam import fixture", "['sp://*.foobar']) assert result 
is True result = self.isValidOrigin('sp://custom-thing.bizbaz/foo/bar', ['sp://*.foobar']) assert", "self.isValidOrigin('http://example.com', ['foo.example.com']) self.assertEquals(result, False) def test_full_uri_match(self): result = self.isValidOrigin('http://example.com', ['http://example.com'])", "['http://example.com']) self.assertEquals(result, True) def test_null_valid_with_global(self): result = self.isValidOrigin('null', ['*']) self.assertEquals(result,", "inputs result = is_valid_origin(origin, self.project) get_origins.assert_called_once_with(self.project) return result def test_global_wildcard_matches_domain(self):", "result = is_valid_origin(origin, self.project) get_origins.assert_called_once_with(self.project) return result def test_global_wildcard_matches_domain(self): result", "self.isValidOrigin('http://foo.example.com', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_subdomain_with_port(self): result = self.isValidOrigin('http://foo.example.com:80', ['*.example.com'])", "[u'*.xn--lcalhost-54a']) assert result is True result = self.isValidOrigin('http://l\\xc3\\xb8calhost', [u'*.xn--lcalhost-54a']) assert", "coding: utf-8 -*- from __future__ import absolute_import import mock from", "= self.isValidOrigin('http://foo.com', ['*.example.com']) self.assertEquals(result, False) def test_domain_wildcard_matches_domain_with_path(self): result = self.isValidOrigin('http://foo.example.com/foo/bar',", "= self.isValidOrigin('http://example.com', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80',", "project(self): return mock.Mock() def isValidOrigin(self, origin, inputs): with mock.patch('sentry.utils.http.get_origins') as", "= is_valid_origin(origin, self.project) get_origins.assert_called_once_with(self.project) return result def test_global_wildcard_matches_domain(self): result =", "Project.objects.get() with 
self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project) self.assertEquals(result, frozenset(['*'])) def test_project(self):", "['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_matches_subdomain(self): result = self.isValidOrigin('http://foo.example.com', ['*.example.com']) self.assertEquals(result,", "result = self.isValidOrigin('http://foo.example.com/foo/bar', ['*.example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain(self): result =", "result = self.isValidOrigin('http://example.com', ['*']) self.assertEquals(result, True) def test_domain_wildcard_matches_domain(self): result =", "= Project.objects.get() with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project) self.assertEquals(result, frozenset(['*'])) def", "is False def test_base_domain_does_not_match_subdomain(self): result = self.isValidOrigin('http://example.com', ['foo.example.com']) self.assertEquals(result, False)", "result = self.isValidOrigin('http://example.com', ['foo.example.com']) self.assertEquals(result, False) def test_full_uri_match(self): result =", "self.isValidOrigin('null', ['http://example.com']) self.assertEquals(result, False) def test_custom_protocol_with_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://custom-thing'])", "result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.l\\xf8calhost']) assert result is True def test_punycode(self):", "AbsoluteUriTest(TestCase): def test_without_path(self): assert absolute_uri() == options.get('system.url-prefix') def test_with_path(self): assert", "self.isValidOrigin('http://xn--lcalhost-54a', [u'*.l\\xf8calhost']) assert result is True result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.xn--lcalhost-54a'])", "def test_unparseable_uri(self): result = self.isValidOrigin('http://example.com', ['.']) assert result is False", "assert self.is_valid_ip('127.0.0.1', ['0.0.0.0', '192.168.1.1', '10.0.0.0/8']) def 
test_match_blacklist(self): assert not self.is_valid_ip('127.0.0.1',", "result = get_origins(project) self.assertEquals(result, frozenset(['http://foo.example'])) def test_project_and_setting(self): project = Project.objects.get()", "True) def test_domain_wildcard_matches_subdomain(self): result = self.isValidOrigin('http://foo.example.com', ['*.example.com']) self.assertEquals(result, True) def", "result = self.isValidOrigin('http://foo.example.com:80', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_does_not_match_others(self): result =", "True result = self.isValidOrigin('http://xn--lcalhost-54a', [u'l\\xf8calhost']) assert result is True result", "['0.0.0.0', '127.0.0.1', '192.168.1.1']) def test_match_blacklist_range(self): assert not self.is_valid_ip('127.0.0.1', ['127.0.0.0/8']) assert", "def test_project_and_setting(self): project = Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN='http://example.com'): result", "def test_base_domain_does_not_match_domain_with_invalid_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com:443']) assert result is False", "True) def test_domain_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com', ['*.example.com']) self.assertEquals(result, True) def", "is False def test_custom_protocol_without_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://*']) assert result", "= get_origins(None) self.assertEquals(result, frozenset([])) def test_setting_all(self): with self.settings(SENTRY_ALLOW_ORIGIN='*'): result =", "class GetOriginsTestCase(TestCase): def test_project_default(self): project = Project.objects.get() with self.settings(SENTRY_ALLOW_ORIGIN=None): result", "import absolute_import import mock from exam import fixture from sentry", "= self.isValidOrigin('null', ['*']) self.assertEquals(result, True) def 
test_null_invalid_graceful_with_domains(self): result = self.isValidOrigin('null',", "[u'*.l\\xf8calhost']) assert result is True result = self.isValidOrigin('http://xn--lcalhost-54a', [u'*.xn--lcalhost-54a']) assert", "is True def test_base_domain_does_not_match_domain_with_invalid_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com:443']) assert result", "result = self.isValidOrigin('https://example.com', ['http://example.com']) self.assertEquals(result, False) def test_full_uri_match_does_not_require_port(self): result =", "self.assertEquals(result, False) def test_full_uri_match_does_not_require_port(self): result = self.isValidOrigin('http://example.com:80', ['http://example.com']) self.assertEquals(result, True)", "self.isValidOrigin('http://example.com/foo/bar', ['http://example.com']) self.assertEquals(result, True) def test_null_valid_with_global(self): result = self.isValidOrigin('null', ['*'])", "result is True result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.xn--lcalhost-54a']) assert result is", "assert absolute_uri('/foo/bar') == '%s/foo/bar' % (options.get('system.url-prefix'),) class SameDomainTestCase(TestCase): def test_is_same_domain(self):", "result is True def test_unparseable_uri(self): result = self.isValidOrigin('http://example.com', ['.']) assert", "result = self.isValidOrigin('http://example.com:80', ['example.com:80']) assert result is True def test_base_domain_does_not_match_domain_with_invalid_port(self):", "test_is_same_domain(self): url1 = 'http://example.com/foo/bar' url2 = 'http://example.com/biz/baz' self.assertTrue(is_same_domain(url1, url2)) def", "= Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example']) with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project) self.assertEquals(result,", "result is True result = self.isValidOrigin('http://l\\xc3\\xb8calhost', [u'*.xn--lcalhost-54a']) assert result is", "[u'l\\xf8calhost:80']) assert result is 
True def test_unparseable_uri(self): result = self.isValidOrigin('http://example.com',", "self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(project) self.assertEquals(result, frozenset(['http://foo.example'])) def test_project_and_setting(self): project =", "'192.168.1.1', '10.0.0.0/8']) def test_match_blacklist(self): assert not self.is_valid_ip('127.0.0.1', ['127.0.0.1']) assert not", "result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.xn--lcalhost-54a']) assert result is True result =", "-*- coding: utf-8 -*- from __future__ import absolute_import import mock", "self.assertEquals(result, True) def test_null_valid_with_global(self): result = self.isValidOrigin('null', ['*']) self.assertEquals(result, True)", "self.isValidOrigin('http://foo.example.com:80', ['*.example.com']) self.assertEquals(result, True) def test_domain_wildcard_does_not_match_others(self): result = self.isValidOrigin('http://foo.com', ['*.example.com'])", "True) def test_domain_wildcard_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80', ['*.example.com']) self.assertEquals(result, True) def", "fixture from sentry import options from sentry.models import Project from", "True) def test_base_domain_matches_domain(self): result = self.isValidOrigin('http://example.com', ['example.com']) self.assertEquals(result, True) def", "self.isValidOrigin('https://example.com', ['http://example.com']) self.assertEquals(result, False) def test_full_uri_match_does_not_require_port(self): result = self.isValidOrigin('http://example.com:80', ['http://example.com'])", "= self.isValidOrigin('http://example.com:80', ['http://example.com']) self.assertEquals(result, True) def test_partial_uri_match(self): result = self.isValidOrigin('http://example.com/foo/bar',", "def test_null_invalid_graceful_with_domains(self): result = self.isValidOrigin('null', ['http://example.com']) self.assertEquals(result, False) def test_custom_protocol_with_location(self):", 
"(options.get('system.url-prefix'),) class SameDomainTestCase(TestCase): def test_is_same_domain(self): url1 = 'http://example.com/foo/bar' url2 =", "result = self.isValidOrigin('http://l\\xc3\\xb8calhost', [u'*.xn--lcalhost-54a']) assert result is True result =", "result is False class IsValidIPTestCase(TestCase): def is_valid_ip(self, ip, inputs): self.project.update_option('sentry:blacklisted_ips',", "['*']) self.assertEquals(result, True) def test_domain_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com', ['*.example.com']) self.assertEquals(result,", "result = self.isValidOrigin('http://example.com:80', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_explicit_port(self): result =", "['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com']) self.assertEquals(result,", "class IsValidIPTestCase(TestCase): def is_valid_ip(self, ip, inputs): self.project.update_option('sentry:blacklisted_ips', inputs) return is_valid_ip(ip,", "def test_match_blacklist_range(self): assert not self.is_valid_ip('127.0.0.1', ['127.0.0.0/8']) assert not self.is_valid_ip('127.0.0.1', ['0.0.0.0',", "'http://example.com'])) def test_setting_empty(self): with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(None) self.assertEquals(result, frozenset([]))", "self.assertEquals(result, True) def test_domain_wildcard_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80', ['*.example.com']) self.assertEquals(result, True)", "= self.isValidOrigin('http://xn--lcalhost-54a:80', [u'l\\xf8calhost:80']) assert result is True def test_unparseable_uri(self): result", "self.assertEquals(result, True) def test_base_domain_matches_domain_with_port(self): result = self.isValidOrigin('http://example.com:80', ['example.com']) self.assertEquals(result, True)", "self.assertEquals(result, 
True) def test_domain_wildcard_matches_subdomain_with_port(self): result = self.isValidOrigin('http://foo.example.com:80', ['*.example.com']) self.assertEquals(result, True)", "test_setting_empty(self): with self.settings(SENTRY_ALLOW_ORIGIN=None): result = get_origins(None) self.assertEquals(result, frozenset([])) def test_setting_all(self):", "is True def test_unparseable_uri(self): result = self.isValidOrigin('http://example.com', ['.']) assert result", "['http://example.com']) self.assertEquals(result, False) def test_custom_protocol_with_location(self): result = self.isValidOrigin('sp://custom-thing/foo/bar', ['sp://custom-thing']) assert", "['0.0.0.0', '192.168.1.1', '10.0.0.0/8']) def test_match_blacklist(self): assert not self.is_valid_ip('127.0.0.1', ['127.0.0.1']) assert", "return result def test_global_wildcard_matches_domain(self): result = self.isValidOrigin('http://example.com', ['*']) self.assertEquals(result, True)", "is False def test_unicode(self): result = self.isValidOrigin(u'http://l\\xf8calhost', [u'*.l\\xf8calhost']) assert result", "test_base_domain_matches_domain(self): result = self.isValidOrigin('http://example.com', ['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_path(self): result", "== options.get('system.url-prefix') def test_with_path(self): assert absolute_uri('/foo/bar') == '%s/foo/bar' % (options.get('system.url-prefix'),)", "'%s/foo/bar' % (options.get('system.url-prefix'),) class SameDomainTestCase(TestCase): def test_is_same_domain(self): url1 = 'http://example.com/foo/bar'", "test_without_path(self): assert absolute_uri() == options.get('system.url-prefix') def test_with_path(self): assert absolute_uri('/foo/bar') ==", "isValidOrigin(self, origin, inputs): with mock.patch('sentry.utils.http.get_origins') as get_origins: get_origins.return_value = inputs", "self.isValidOrigin('sp://custom-thing-two/foo/bar', ['sp://custom-thing']) assert result is False def 
test_custom_protocol_without_location(self): result =", "= self.isValidOrigin(u'http://l\\xf8calhost', [u'*.l\\xf8calhost']) assert result is True def test_punycode(self): result", "self.isValidOrigin('http://foo.com', ['*.example.com']) self.assertEquals(result, False) def test_domain_wildcard_matches_domain_with_path(self): result = self.isValidOrigin('http://foo.example.com/foo/bar', ['*.example.com'])", "= self.isValidOrigin('http://xn--lcalhost-54a', [u'*.l\\xf8calhost']) assert result is True result = self.isValidOrigin('http://xn--lcalhost-54a',", "self.isValidOrigin('http://l\\xc3\\xb8calhost', [u'*.xn--lcalhost-54a']) assert result is True result = self.isValidOrigin('http://xn--lcalhost-54a', [u'l\\xf8calhost'])", "assert result is False def test_base_domain_does_not_match_subdomain(self): result = self.isValidOrigin('http://example.com', ['foo.example.com'])", "self.assertEquals(result, frozenset(['*'])) def test_project(self): project = Project.objects.get() project.update_option('sentry:origins', [u'http://foo.example']) with", "['example.com']) self.assertEquals(result, True) def test_base_domain_matches_domain_with_path(self): result = self.isValidOrigin('http://example.com/foo/bar', ['example.com']) self.assertEquals(result," ]
[ "html, number, misc, text from markupsafe import Markup def bold(text):", "-*- coding: utf-8 -*- \"\"\"WebHelpers used in project.\"\"\" #from webhelpers", "-*- \"\"\"WebHelpers used in project.\"\"\" #from webhelpers import date, feedgenerator,", "webhelpers import date, feedgenerator, html, number, misc, text from markupsafe", "#from webhelpers import date, feedgenerator, html, number, misc, text from", "number, misc, text from markupsafe import Markup def bold(text): return", "import date, feedgenerator, html, number, misc, text from markupsafe import", "feedgenerator, html, number, misc, text from markupsafe import Markup def", "from markupsafe import Markup def bold(text): return Markup('<strong>%s</strong>' % text)", "utf-8 -*- \"\"\"WebHelpers used in project.\"\"\" #from webhelpers import date,", "<gh_stars>0 # -*- coding: utf-8 -*- \"\"\"WebHelpers used in project.\"\"\"", "\"\"\"WebHelpers used in project.\"\"\" #from webhelpers import date, feedgenerator, html,", "date, feedgenerator, html, number, misc, text from markupsafe import Markup", "used in project.\"\"\" #from webhelpers import date, feedgenerator, html, number,", "text from markupsafe import Markup def bold(text): return Markup('<strong>%s</strong>' %", "in project.\"\"\" #from webhelpers import date, feedgenerator, html, number, misc,", "coding: utf-8 -*- \"\"\"WebHelpers used in project.\"\"\" #from webhelpers import", "project.\"\"\" #from webhelpers import date, feedgenerator, html, number, misc, text", "# -*- coding: utf-8 -*- \"\"\"WebHelpers used in project.\"\"\" #from", "misc, text from markupsafe import Markup def bold(text): return Markup('<strong>%s</strong>'" ]
[ "listOfAmountThreads, listOfAmountOfMachines); def printUsageAndExit(): print 'usage: binary <path workload file>", "dirToWriteResultTo = sys.argv[2]; runtimeBenchmarkInMinutes = int(sys.argv[3]); listOfOpsPerSec = sys.argv[4].split(','); listOfAmountThreads", "workload file> <result dir> <runtime benchmark> <list of #ops> <list", "sys.argv[4].split(','); listOfAmountThreads = sys.argv[5].split(','); listOfAmountOfMachines = sys.argv[6].split(','); if len(sys.argv) >=", "sys.argv[2]; runtimeBenchmarkInMinutes = int(sys.argv[3]); listOfOpsPerSec = sys.argv[4].split(','); listOfAmountThreads = sys.argv[5].split(',');", "<runtime benchmark> <list of #ops> <list of #threads> <list of", "benchmark> <list of #ops> <list of #threads> <list of #machines>", "import sys; from Thesis.load.loadBenchmark import runLoadBenchmarkAsBatch; from Thesis.cluster.RiakCluster import RiakCluster;", "NORMAL_BINDING = 'riak'; CONSISTENCY_BINDING = 'riak_consistency'; IPS_IN_CLUSTER = ['172.16.33.14', '172.16.33.15',", "= sys.argv[4].split(','); listOfAmountThreads = sys.argv[5].split(','); listOfAmountOfMachines = sys.argv[6].split(','); if len(sys.argv)", "IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, ['172.16.33.10'], '/root/YCSB/workloads/workload_load', 3, '/root/YCSB/loads/riak', ['1000000000'], ['1'], ['1']); #", "'172.16.33.17', '172.16.33.18']; def main(): if len(sys.argv) < 7: printUsageAndExit(); pathToWorkloadFile", "IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, remoteYcsbNodes, pathToWorkloadFile, runtimeBenchmarkInMinutes, dirToWriteResultTo, listOfOpsPerSec, listOfAmountThreads, listOfAmountOfMachines); def", "[<list remote ycsb nodes>]'; exit(); cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER);", "#ops> <list of #threads> <list of #machines> [<list remote ycsb", "#machines> [<list remote ycsb nodes>]'; exit(); cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING,", "= 'riak'; CONSISTENCY_BINDING = 'riak_consistency'; IPS_IN_CLUSTER = 
['172.16.33.14', '172.16.33.15', '172.16.33.16',", "printUsageAndExit(); pathToWorkloadFile = sys.argv[1]; dirToWriteResultTo = sys.argv[2]; runtimeBenchmarkInMinutes = int(sys.argv[3]);", "print 'usage: binary <path workload file> <result dir> <runtime benchmark>", "main(): if len(sys.argv) < 7: printUsageAndExit(); pathToWorkloadFile = sys.argv[1]; dirToWriteResultTo", "< 7: printUsageAndExit(); pathToWorkloadFile = sys.argv[1]; dirToWriteResultTo = sys.argv[2]; runtimeBenchmarkInMinutes", "<list of #ops> <list of #threads> <list of #machines> [<list", "Thesis.load.loadBenchmark import runLoadBenchmarkAsBatch; from Thesis.cluster.RiakCluster import RiakCluster; NORMAL_BINDING = 'riak';", "import RiakCluster; NORMAL_BINDING = 'riak'; CONSISTENCY_BINDING = 'riak_consistency'; IPS_IN_CLUSTER =", "if len(sys.argv) < 7: printUsageAndExit(); pathToWorkloadFile = sys.argv[1]; dirToWriteResultTo =", "exit(); cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, ['172.16.33.10'], '/root/YCSB/workloads/workload_load', 3,", "= sys.argv[2]; runtimeBenchmarkInMinutes = int(sys.argv[3]); listOfOpsPerSec = sys.argv[4].split(','); listOfAmountThreads =", "printUsageAndExit(): print 'usage: binary <path workload file> <result dir> <runtime", "from Thesis.cluster.RiakCluster import RiakCluster; NORMAL_BINDING = 'riak'; CONSISTENCY_BINDING = 'riak_consistency';", "'riak_consistency'; IPS_IN_CLUSTER = ['172.16.33.14', '172.16.33.15', '172.16.33.16', '172.16.33.17', '172.16.33.18']; def main():", ">= 8: remoteYcsbNodes = sys.argv[7].split(','); else: remoteYcsbNodes = []; cluster", "file> <result dir> <runtime benchmark> <list of #ops> <list of", "<path workload file> <result dir> <runtime benchmark> <list of #ops>", "[]; cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, remoteYcsbNodes, pathToWorkloadFile, runtimeBenchmarkInMinutes,", "binary <path workload file> <result 
dir> <runtime benchmark> <list of", "pathToWorkloadFile, runtimeBenchmarkInMinutes, dirToWriteResultTo, listOfOpsPerSec, listOfAmountThreads, listOfAmountOfMachines); def printUsageAndExit(): print 'usage:", "= sys.argv[5].split(','); listOfAmountOfMachines = sys.argv[6].split(','); if len(sys.argv) >= 8: remoteYcsbNodes", "= sys.argv[1]; dirToWriteResultTo = sys.argv[2]; runtimeBenchmarkInMinutes = int(sys.argv[3]); listOfOpsPerSec =", "'172.16.33.16', '172.16.33.17', '172.16.33.18']; def main(): if len(sys.argv) < 7: printUsageAndExit();", "= sys.argv[6].split(','); if len(sys.argv) >= 8: remoteYcsbNodes = sys.argv[7].split(','); else:", "runtimeBenchmarkInMinutes = int(sys.argv[3]); listOfOpsPerSec = sys.argv[4].split(','); listOfAmountThreads = sys.argv[5].split(','); listOfAmountOfMachines", "IPS_IN_CLUSTER = ['172.16.33.14', '172.16.33.15', '172.16.33.16', '172.16.33.17', '172.16.33.18']; def main(): if", "def main(): if len(sys.argv) < 7: printUsageAndExit(); pathToWorkloadFile = sys.argv[1];", "= RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, remoteYcsbNodes, pathToWorkloadFile, runtimeBenchmarkInMinutes, dirToWriteResultTo, listOfOpsPerSec,", "if len(sys.argv) >= 8: remoteYcsbNodes = sys.argv[7].split(','); else: remoteYcsbNodes =", "<list of #machines> [<list remote ycsb nodes>]'; exit(); cluster =", "= int(sys.argv[3]); listOfOpsPerSec = sys.argv[4].split(','); listOfAmountThreads = sys.argv[5].split(','); listOfAmountOfMachines =", "<list of #threads> <list of #machines> [<list remote ycsb nodes>]';", "remoteYcsbNodes, pathToWorkloadFile, runtimeBenchmarkInMinutes, dirToWriteResultTo, listOfOpsPerSec, listOfAmountThreads, listOfAmountOfMachines); def printUsageAndExit(): print", "RiakCluster; NORMAL_BINDING = 'riak'; CONSISTENCY_BINDING = 'riak_consistency'; IPS_IN_CLUSTER = ['172.16.33.14',", "dirToWriteResultTo, listOfOpsPerSec, listOfAmountThreads, listOfAmountOfMachines); def printUsageAndExit(): 
print 'usage: binary <path", "RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, ['172.16.33.10'], '/root/YCSB/workloads/workload_load', 3, '/root/YCSB/loads/riak', ['1000000000'], ['1'],", "listOfAmountOfMachines); def printUsageAndExit(): print 'usage: binary <path workload file> <result", "sys; from Thesis.load.loadBenchmark import runLoadBenchmarkAsBatch; from Thesis.cluster.RiakCluster import RiakCluster; NORMAL_BINDING", "RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, remoteYcsbNodes, pathToWorkloadFile, runtimeBenchmarkInMinutes, dirToWriteResultTo, listOfOpsPerSec, listOfAmountThreads,", "remoteYcsbNodes = []; cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, remoteYcsbNodes,", "['172.16.33.14', '172.16.33.15', '172.16.33.16', '172.16.33.17', '172.16.33.18']; def main(): if len(sys.argv) <", "8: remoteYcsbNodes = sys.argv[7].split(','); else: remoteYcsbNodes = []; cluster =", "int(sys.argv[3]); listOfOpsPerSec = sys.argv[4].split(','); listOfAmountThreads = sys.argv[5].split(','); listOfAmountOfMachines = sys.argv[6].split(',');", "CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, ['172.16.33.10'], '/root/YCSB/workloads/workload_load', 3, '/root/YCSB/loads/riak', ['1000000000'], ['1'], ['1']);", "cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, remoteYcsbNodes, pathToWorkloadFile, runtimeBenchmarkInMinutes, dirToWriteResultTo,", "of #threads> <list of #machines> [<list remote ycsb nodes>]'; exit();", "from Thesis.load.loadBenchmark import runLoadBenchmarkAsBatch; from Thesis.cluster.RiakCluster import RiakCluster; NORMAL_BINDING =", "runLoadBenchmarkAsBatch(cluster, remoteYcsbNodes, pathToWorkloadFile, runtimeBenchmarkInMinutes, dirToWriteResultTo, listOfOpsPerSec, listOfAmountThreads, listOfAmountOfMachines); def 
printUsageAndExit():", "CONSISTENCY_BINDING = 'riak_consistency'; IPS_IN_CLUSTER = ['172.16.33.14', '172.16.33.15', '172.16.33.16', '172.16.33.17', '172.16.33.18'];", "sys.argv[1]; dirToWriteResultTo = sys.argv[2]; runtimeBenchmarkInMinutes = int(sys.argv[3]); listOfOpsPerSec = sys.argv[4].split(',');", "listOfAmountThreads = sys.argv[5].split(','); listOfAmountOfMachines = sys.argv[6].split(','); if len(sys.argv) >= 8:", "len(sys.argv) < 7: printUsageAndExit(); pathToWorkloadFile = sys.argv[1]; dirToWriteResultTo = sys.argv[2];", "def printUsageAndExit(): print 'usage: binary <path workload file> <result dir>", "pathToWorkloadFile = sys.argv[1]; dirToWriteResultTo = sys.argv[2]; runtimeBenchmarkInMinutes = int(sys.argv[3]); listOfOpsPerSec", "'172.16.33.18']; def main(): if len(sys.argv) < 7: printUsageAndExit(); pathToWorkloadFile =", "else: remoteYcsbNodes = []; cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster,", "sys.argv[7].split(','); else: remoteYcsbNodes = []; cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER);", "ycsb nodes>]'; exit(); cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, ['172.16.33.10'],", "= RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, ['172.16.33.10'], '/root/YCSB/workloads/workload_load', 3, '/root/YCSB/loads/riak', ['1000000000'],", "Thesis.cluster.RiakCluster import RiakCluster; NORMAL_BINDING = 'riak'; CONSISTENCY_BINDING = 'riak_consistency'; IPS_IN_CLUSTER", "runLoadBenchmarkAsBatch(cluster, ['172.16.33.10'], '/root/YCSB/workloads/workload_load', 3, '/root/YCSB/loads/riak', ['1000000000'], ['1'], ['1']); # main();", "of #machines> [<list remote ycsb nodes>]'; exit(); cluster = RiakCluster(NORMAL_BINDING,", "remoteYcsbNodes = sys.argv[7].split(','); else: remoteYcsbNodes = []; cluster = RiakCluster(NORMAL_BINDING,", "7: printUsageAndExit(); 
pathToWorkloadFile = sys.argv[1]; dirToWriteResultTo = sys.argv[2]; runtimeBenchmarkInMinutes =", "len(sys.argv) >= 8: remoteYcsbNodes = sys.argv[7].split(','); else: remoteYcsbNodes = [];", "CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, remoteYcsbNodes, pathToWorkloadFile, runtimeBenchmarkInMinutes, dirToWriteResultTo, listOfOpsPerSec, listOfAmountThreads, listOfAmountOfMachines);", "sys.argv[5].split(','); listOfAmountOfMachines = sys.argv[6].split(','); if len(sys.argv) >= 8: remoteYcsbNodes =", "'usage: binary <path workload file> <result dir> <runtime benchmark> <list", "remote ycsb nodes>]'; exit(); cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster,", "'172.16.33.15', '172.16.33.16', '172.16.33.17', '172.16.33.18']; def main(): if len(sys.argv) < 7:", "of #ops> <list of #threads> <list of #machines> [<list remote", "cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, ['172.16.33.10'], '/root/YCSB/workloads/workload_load', 3, '/root/YCSB/loads/riak',", "'riak'; CONSISTENCY_BINDING = 'riak_consistency'; IPS_IN_CLUSTER = ['172.16.33.14', '172.16.33.15', '172.16.33.16', '172.16.33.17',", "nodes>]'; exit(); cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, ['172.16.33.10'], '/root/YCSB/workloads/workload_load',", "import runLoadBenchmarkAsBatch; from Thesis.cluster.RiakCluster import RiakCluster; NORMAL_BINDING = 'riak'; CONSISTENCY_BINDING", "= 'riak_consistency'; IPS_IN_CLUSTER = ['172.16.33.14', '172.16.33.15', '172.16.33.16', '172.16.33.17', '172.16.33.18']; def", "listOfAmountOfMachines = sys.argv[6].split(','); if len(sys.argv) >= 8: remoteYcsbNodes = sys.argv[7].split(',');", "runLoadBenchmarkAsBatch; from Thesis.cluster.RiakCluster import RiakCluster; NORMAL_BINDING = 'riak'; CONSISTENCY_BINDING =", "= []; cluster = RiakCluster(NORMAL_BINDING, 
CONSISTENCY_BINDING, IPS_IN_CLUSTER); runLoadBenchmarkAsBatch(cluster, remoteYcsbNodes, pathToWorkloadFile,", "listOfOpsPerSec, listOfAmountThreads, listOfAmountOfMachines); def printUsageAndExit(): print 'usage: binary <path workload", "runtimeBenchmarkInMinutes, dirToWriteResultTo, listOfOpsPerSec, listOfAmountThreads, listOfAmountOfMachines); def printUsageAndExit(): print 'usage: binary", "listOfOpsPerSec = sys.argv[4].split(','); listOfAmountThreads = sys.argv[5].split(','); listOfAmountOfMachines = sys.argv[6].split(','); if", "= ['172.16.33.14', '172.16.33.15', '172.16.33.16', '172.16.33.17', '172.16.33.18']; def main(): if len(sys.argv)", "#threads> <list of #machines> [<list remote ycsb nodes>]'; exit(); cluster", "sys.argv[6].split(','); if len(sys.argv) >= 8: remoteYcsbNodes = sys.argv[7].split(','); else: remoteYcsbNodes", "<result dir> <runtime benchmark> <list of #ops> <list of #threads>", "dir> <runtime benchmark> <list of #ops> <list of #threads> <list", "= sys.argv[7].split(','); else: remoteYcsbNodes = []; cluster = RiakCluster(NORMAL_BINDING, CONSISTENCY_BINDING," ]
[ "'The triagers need to have a \\'Fallback\\' entry.', 'triagers': {", "'ab', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-17') == ( '<EMAIL>', 'ab', )", "= RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) assert rr.get_who_to_nag('2019-02-25') == {}", "'doc': 'All the dates are the duty end dates.', '2019-02-21':", "'G H', 'ismanager': 'FALSE', 'title': 'nothing', } ] ) def", "'2019-02-21': 'A B', '2019-02-28': 'C D', '2019-03-07': 'E F', },", "\\'Fallback\\' entry.', 'triagers': { 'A B': {'bzmail': '<EMAIL>'}, 'C D':", "'All the dates are the duty end dates.', '2019-02-21': 'E", "'triage_owner': '<EMAIL>', 'triage_owner_detail': {'nick': 'ij'}, } @staticmethod def _get_nick(x, bzmail):", "assert rr.get(self.mk_bug('P2::C2'), '2019-02-28') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P3::C3'),", "of the MPL was not distributed with this file, #", "at http://mozilla.org/MPL/2.0/. import unittest from mock import patch from auto_nag.people", "D': {'bzmail': '<EMAIL>'}, 'E F': {'bzmail': '<EMAIL>'}, 'Fallback': {'bzmail': '<EMAIL>'},", "from auto_nag.people import People from auto_nag.round_robin import BadFallback, RoundRobin class", "'doc': 'The triagers need to have a \\'Fallback\\' entry.', 'triagers':", "rr.get(self.mk_bug('P2::C2'), '2019-02-24') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-24')", "'<EMAIL>'}, }, 'components': {'P1::C1': 'default', 'P2::C2': 'default', 'P3::C3': 'special'}, 'default':", "assert rr.get(self.mk_bug('P3::C3'), '2019-03-05') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P1::C1'),", "one at http://mozilla.org/MPL/2.0/. import unittest from mock import patch from", "2.0. 
If a copy of the MPL was not distributed", "}, } people = People( [ { 'mail': '<EMAIL>', 'cn':", "from mock import patch from auto_nag.people import People from auto_nag.round_robin", "[ { 'mail': '<EMAIL>', 'cn': 'G H', 'ismanager': 'FALSE', 'title':", "'P2::C2': 'default', 'P3::C3': 'special'}, 'default': { 'doc': 'All the dates", "'<EMAIL>'}, 'C D': {'bzmail': '<EMAIL>'}, 'E F': {'bzmail': '<EMAIL>'}, 'Fallback':", "was not distributed with this file, # You can obtain", "'<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P2::C2'), '2019-03-05') == ( '<EMAIL>', 'ef',", "( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P1::C1'), '2019-03-08') == ( '<EMAIL>',", "obtain one at http://mozilla.org/MPL/2.0/. import unittest from mock import patch", "( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('Foo::Bar'), '2019-03-01') == ( '<EMAIL>',", "{} assert rr.get_who_to_nag('2019-02-28') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-05') == {'<EMAIL>':", "== ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-28') == (", ") assert rr.get(self.mk_bug('P3::C3'), '2019-02-28') == ( '<EMAIL>', 'ab', ) assert", "'gh', ) assert rr.get(self.mk_bug('P3::C3'), '2019-03-08') == ( '<EMAIL>', 'gh', )", "( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P1::C1'), '2019-03-05') == ( '<EMAIL>',", "rr.get(self.mk_bug('P2::C2'), '2019-02-28') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-28')", "import BadFallback, RoundRobin class TestRoundRobin(unittest.TestCase): config = { 'doc': 'The", "'ef', ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-24') == ( '<EMAIL>', 'cd', )", "'ef', ) assert rr.get(self.mk_bug('P2::C2'), '2019-03-05') == ( '<EMAIL>', 'ef', )", "== ( '<EMAIL>', 'ij', ) def test_get_who_to_nag(self): rr = RoundRobin(", "'cd', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-24') == ( '<EMAIL>', 'ab', )", "pc): p, c = pc.split('::') return { 'product': p, 'component':", "# You can obtain one at http://mozilla.org/MPL/2.0/. 
import unittest from", "Code Form is subject to the terms of the Mozilla", "p, c = pc.split('::') return { 'product': p, 'component': c,", "assert rr.get(self.mk_bug('P2::C2'), '2019-02-17') == ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P3::C3'),", ") def mk_bug(self, pc): p, c = pc.split('::') return {", "'special'}, 'default': { 'doc': 'All the dates are the duty", "This Source Code Form is subject to the terms of", "mock import patch from auto_nag.people import People from auto_nag.round_robin import", "If a copy of the MPL was not distributed with", "rr.get(self.mk_bug('P3::C3'), '2019-02-17') == ( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-24')", "'2019-03-07': 'E F', }, 'special': { 'doc': 'All the dates", "( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-24') == ( '<EMAIL>',", "( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-17') == ( '<EMAIL>',", "rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) assert rr.get_who_to_nag('2019-02-25') == {} assert rr.get_who_to_nag('2019-02-28')", "rr.get(self.mk_bug('P3::C3'), '2019-03-05') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P1::C1'), '2019-03-08')", "@staticmethod def _get_nick(x, bzmail): return bzmail.split('@')[0] def test_get(self): with patch.object(RoundRobin,", "( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('P3::C3'), '2019-03-08') == ( '<EMAIL>',", "class TestRoundRobin(unittest.TestCase): config = { 'doc': 'The triagers need to", "def test_get_who_to_nag(self): rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) assert", "patch.object(RoundRobin, 'get_nick', new=TestRoundRobin._get_nick): rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people )", "MPL was not distributed with this file, # You can", "( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-24') == ( '<EMAIL>',", "# coding: utf-8 # This Source Code Form is subject", "return { 
'product': p, 'component': c, 'triage_owner': '<EMAIL>', 'triage_owner_detail': {'nick':", "to the terms of the Mozilla Public # License, v.", "'components': {'P1::C1': 'default', 'P2::C2': 'default', 'P3::C3': 'special'}, 'default': { 'doc':", ") assert rr.get(self.mk_bug('P1::C1'), '2019-03-08') == ( '<EMAIL>', 'gh', ) assert", "bzmail.split('@')[0] def test_get(self): with patch.object(RoundRobin, 'get_nick', new=TestRoundRobin._get_nick): rr = RoundRobin(", "import People from auto_nag.round_robin import BadFallback, RoundRobin class TestRoundRobin(unittest.TestCase): config", "'2019-02-28': 'C D', '2019-03-07': 'E F', }, 'special': { 'doc':", "patch from auto_nag.people import People from auto_nag.round_robin import BadFallback, RoundRobin", "triagers need to have a \\'Fallback\\' entry.', 'triagers': { 'A", "'2019-03-08') == ( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('P3::C3'), '2019-03-08') ==", "}, 'components': {'P1::C1': 'default', 'P2::C2': 'default', 'P3::C3': 'special'}, 'default': {", "'Fallback': {'bzmail': '<EMAIL>'}, }, 'components': {'P1::C1': 'default', 'P2::C2': 'default', 'P3::C3':", "F': {'bzmail': '<EMAIL>'}, 'Fallback': {'bzmail': '<EMAIL>'}, }, 'components': {'P1::C1': 'default',", "'E F': {'bzmail': '<EMAIL>'}, 'Fallback': {'bzmail': '<EMAIL>'}, }, 'components': {'P1::C1':", "assert rr.get(self.mk_bug('P2::C2'), '2019-03-08') == ( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('P3::C3'),", "rr.get(self.mk_bug('Foo::Bar'), '2019-03-01') == ( '<EMAIL>', 'ij', ) def test_get_who_to_nag(self): rr", "assert rr.get(self.mk_bug('P1::C1'), '2019-02-17') == ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P2::C2'),", ") assert rr.get(self.mk_bug('P2::C2'), '2019-02-24') == ( '<EMAIL>', 'cd', ) assert", "'<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-28') == ( '<EMAIL>', 'ab',", "assert rr.get_who_to_nag('2019-03-07') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-10') == {'<EMAIL>': ['']}", "p, 'component': c, 
'triage_owner': '<EMAIL>', 'triage_owner_detail': {'nick': 'ij'}, } @staticmethod", "'gh', ) assert rr.get(self.mk_bug('P2::C2'), '2019-03-08') == ( '<EMAIL>', 'gh', )", "'default', 'P3::C3': 'special'}, 'default': { 'doc': 'All the dates are", "auto_nag.people import People from auto_nag.round_robin import BadFallback, RoundRobin class TestRoundRobin(unittest.TestCase):", "rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-17')", ") assert rr.get(self.mk_bug('P3::C3'), '2019-03-05') == ( '<EMAIL>', 'cd', ) assert", "{ 'mail': '<EMAIL>', 'cn': 'G H', 'ismanager': 'FALSE', 'title': 'nothing',", "== ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-17') == (", "== ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-24') == (", "assert rr.get(self.mk_bug('P3::C3'), '2019-02-28') == ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P1::C1'),", "'gh', ) assert rr.get(self.mk_bug('Foo::Bar'), '2019-03-01') == ( '<EMAIL>', 'ij', )", "H', 'ismanager': 'FALSE', 'title': 'nothing', } ] ) def mk_bug(self,", "'ismanager': 'FALSE', 'title': 'nothing', } ] ) def mk_bug(self, pc):", "rr.get(self.mk_bug('P1::C1'), '2019-02-24') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-24')", ") assert rr.get(self.mk_bug('P1::C1'), '2019-02-28') == ( '<EMAIL>', 'cd', ) assert", "'A B': {'bzmail': '<EMAIL>'}, 'C D': {'bzmail': '<EMAIL>'}, 'E F':", "this file, # You can obtain one at http://mozilla.org/MPL/2.0/. 
import", "rr.get(self.mk_bug('P1::C1'), '2019-03-08') == ( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('P2::C2'), '2019-03-08')", "== ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-28') == (", "B', '2019-03-07': 'C D', }, } people = People( [", "'E F', }, 'special': { 'doc': 'All the dates are", "{'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-07') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-10') ==", "( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-24') == ( '<EMAIL>',", "assert rr.get_who_to_nag('2019-02-28') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-05') == {'<EMAIL>': ['']}", "'2019-03-07': 'C D', }, } people = People( [ {", "rr.get_who_to_nag('2019-03-10') == {'<EMAIL>': ['']} with patch.object(RoundRobin, 'is_mozilla', return_value=False): rr =", "test_get(self): with patch.object(RoundRobin, 'get_nick', new=TestRoundRobin._get_nick): rr = RoundRobin( rr={'team': TestRoundRobin.config},", "can obtain one at http://mozilla.org/MPL/2.0/. 
import unittest from mock import", "RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-17') == (", "subject to the terms of the Mozilla Public # License,", "'E F', '2019-02-28': 'A B', '2019-03-07': 'C D', }, }", "( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('P2::C2'), '2019-03-08') == ( '<EMAIL>',", "{'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-05') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-07') ==", "end dates.', '2019-02-21': 'E F', '2019-02-28': 'A B', '2019-03-07': 'C", "from auto_nag.round_robin import BadFallback, RoundRobin class TestRoundRobin(unittest.TestCase): config = {", "dates.', '2019-02-21': 'A B', '2019-02-28': 'C D', '2019-03-07': 'E F',", "== {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-05') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-07')", "copy of the MPL was not distributed with this file,", "rr.get(self.mk_bug('P1::C1'), '2019-02-17') == ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-17')", "'<EMAIL>', 'cn': 'G H', 'ismanager': 'FALSE', 'title': 'nothing', } ]", "with this file, # You can obtain one at http://mozilla.org/MPL/2.0/.", "['']} assert rr.get_who_to_nag('2019-03-10') == {'<EMAIL>': ['']} with patch.object(RoundRobin, 'is_mozilla', return_value=False):", "'default', 'P2::C2': 'default', 'P3::C3': 'special'}, 'default': { 'doc': 'All the", "'2019-03-08') == ( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('Foo::Bar'), '2019-03-01') ==", "import unittest from mock import patch from auto_nag.people import People", "have a \\'Fallback\\' entry.', 'triagers': { 'A B': {'bzmail': '<EMAIL>'},", "People( [ { 'mail': '<EMAIL>', 'cn': 'G H', 'ismanager': 'FALSE',", "['']} with patch.object(RoundRobin, 'is_mozilla', return_value=False): rr = RoundRobin( rr={'team': TestRoundRobin.config},", "= { 'doc': 'The triagers need to have a \\'Fallback\\'", "rr.get(self.mk_bug('P2::C2'), '2019-03-05') == ( '<EMAIL>', 
'ef', ) assert rr.get(self.mk_bug('P3::C3'), '2019-03-05')", "'2019-03-05') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P1::C1'), '2019-03-08') ==", "rr.get(self.mk_bug('P2::C2'), '2019-02-17') == ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-17')", "rr.get_who_to_nag('2019-03-07') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-10') == {'<EMAIL>': ['']} with", "assert rr.get(self.mk_bug('Foo::Bar'), '2019-03-01') == ( '<EMAIL>', 'ij', ) def test_get_who_to_nag(self):", "end dates.', '2019-02-21': 'A B', '2019-02-28': 'C D', '2019-03-07': 'E", "'product': p, 'component': c, 'triage_owner': '<EMAIL>', 'triage_owner_detail': {'nick': 'ij'}, }", "People from auto_nag.round_robin import BadFallback, RoundRobin class TestRoundRobin(unittest.TestCase): config =", "'cd', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-28') == ( '<EMAIL>', 'ab', )", "dates are the duty end dates.', '2019-02-21': 'E F', '2019-02-28':", "assert rr.get(self.mk_bug('P3::C3'), '2019-02-17') == ( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P1::C1'),", "def _get_nick(x, bzmail): return bzmail.split('@')[0] def test_get(self): with patch.object(RoundRobin, 'get_nick',", "== ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P1::C1'), '2019-03-05') == (", "distributed with this file, # You can obtain one at", "'ab', ) assert rr.get(self.mk_bug('P1::C1'), '2019-03-05') == ( '<EMAIL>', 'ef', )", "'<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-17') == ( '<EMAIL>', 'ab',", "= pc.split('::') return { 'product': p, 'component': c, 'triage_owner': '<EMAIL>',", "( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P3::C3'), '2019-03-05') == ( '<EMAIL>',", "assert rr.get_who_to_nag('2019-02-25') == {} assert rr.get_who_to_nag('2019-02-28') == {'<EMAIL>': ['']} assert", "( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-28') == ( '<EMAIL>',", ") assert rr.get(self.mk_bug('P1::C1'), '2019-02-17') == ( '<EMAIL>', 'ab', ) assert", "'ij', ) def 
test_get_who_to_nag(self): rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people", "need to have a \\'Fallback\\' entry.', 'triagers': { 'A B':", "{'bzmail': '<EMAIL>'}, 'E F': {'bzmail': '<EMAIL>'}, 'Fallback': {'bzmail': '<EMAIL>'}, },", "coding: utf-8 # This Source Code Form is subject to", "License, v. 2.0. If a copy of the MPL was", "['']} assert rr.get_who_to_nag('2019-03-07') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-10') == {'<EMAIL>':", "{'P1::C1': 'default', 'P2::C2': 'default', 'P3::C3': 'special'}, 'default': { 'doc': 'All", "def test_get(self): with patch.object(RoundRobin, 'get_nick', new=TestRoundRobin._get_nick): rr = RoundRobin( rr={'team':", "'cd', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-24') == ( '<EMAIL>', 'cd', )", "of the Mozilla Public # License, v. 2.0. If a", "a \\'Fallback\\' entry.', 'triagers': { 'A B': {'bzmail': '<EMAIL>'}, 'C", "} ] ) def mk_bug(self, pc): p, c = pc.split('::')", "people = People( [ { 'mail': '<EMAIL>', 'cn': 'G H',", "{ 'doc': 'All the dates are the duty end dates.',", "assert rr.get_who_to_nag('2019-03-05') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-07') == {'<EMAIL>': ['']}", "'ab', ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-28') == ( '<EMAIL>', 'cd', )", "the Mozilla Public # License, v. 2.0. 
If a copy", ") assert rr.get(self.mk_bug('P2::C2'), '2019-02-28') == ( '<EMAIL>', 'cd', ) assert", "'2019-03-05') == ( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P2::C2'), '2019-03-05') ==", "rr.get_who_to_nag('2019-02-28') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-05') == {'<EMAIL>': ['']} assert", "'is_mozilla', return_value=False): rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) self.assertRaises(BadFallback,", "{'bzmail': '<EMAIL>'}, 'Fallback': {'bzmail': '<EMAIL>'}, }, 'components': {'P1::C1': 'default', 'P2::C2':", "'<EMAIL>'}, 'Fallback': {'bzmail': '<EMAIL>'}, }, 'components': {'P1::C1': 'default', 'P2::C2': 'default',", "F', }, 'special': { 'doc': 'All the dates are the", "'<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('P3::C3'), '2019-03-08') == ( '<EMAIL>', 'gh',", "rr.get(self.mk_bug('P3::C3'), '2019-02-24') == ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-28')", "the dates are the duty end dates.', '2019-02-21': 'E F',", "assert rr.get(self.mk_bug('P3::C3'), '2019-03-08') == ( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('Foo::Bar'),", "'nothing', } ] ) def mk_bug(self, pc): p, c =", "( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-28') == ( '<EMAIL>',", "'2019-02-17') == ( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-24') ==", "with patch.object(RoundRobin, 'is_mozilla', return_value=False): rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people", "( '<EMAIL>', 'ij', ) def test_get_who_to_nag(self): rr = RoundRobin( rr={'team':", "to have a \\'Fallback\\' entry.', 'triagers': { 'A B': {'bzmail':", "'ij'}, } @staticmethod def _get_nick(x, bzmail): return bzmail.split('@')[0] def test_get(self):", "a copy of the MPL was not distributed with this", "{'nick': 'ij'}, } @staticmethod def _get_nick(x, bzmail): return bzmail.split('@')[0] def", "assert rr.get(self.mk_bug('P2::C2'), '2019-02-24') == ( '<EMAIL>', 'cd', ) 
assert rr.get(self.mk_bug('P3::C3'),", ") assert rr.get(self.mk_bug('P2::C2'), '2019-03-08') == ( '<EMAIL>', 'gh', ) assert", ") assert rr.get(self.mk_bug('P3::C3'), '2019-03-08') == ( '<EMAIL>', 'gh', ) assert", "'title': 'nothing', } ] ) def mk_bug(self, pc): p, c", "{'bzmail': '<EMAIL>'}, }, 'components': {'P1::C1': 'default', 'P2::C2': 'default', 'P3::C3': 'special'},", "rr.get(self.mk_bug('P3::C3'), '2019-02-28') == ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P1::C1'), '2019-03-05')", "B': {'bzmail': '<EMAIL>'}, 'C D': {'bzmail': '<EMAIL>'}, 'E F': {'bzmail':", "file, # You can obtain one at http://mozilla.org/MPL/2.0/. import unittest", "patch.object(RoundRobin, 'is_mozilla', return_value=False): rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people )", "Form is subject to the terms of the Mozilla Public", "# This Source Code Form is subject to the terms", "auto_nag.round_robin import BadFallback, RoundRobin class TestRoundRobin(unittest.TestCase): config = { 'doc':", "'get_nick', new=TestRoundRobin._get_nick): rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) assert", "'<EMAIL>'}, 'E F': {'bzmail': '<EMAIL>'}, 'Fallback': {'bzmail': '<EMAIL>'}, }, 'components':", "import patch from auto_nag.people import People from auto_nag.round_robin import BadFallback,", "v. 2.0. If a copy of the MPL was not", "'2019-02-28') == ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P1::C1'), '2019-03-05') ==", "'<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-28') == ( '<EMAIL>', 'cd',", "mk_bug(self, pc): p, c = pc.split('::') return { 'product': p,", "the duty end dates.', '2019-02-21': 'E F', '2019-02-28': 'A B',", "} @staticmethod def _get_nick(x, bzmail): return bzmail.split('@')[0] def test_get(self): with", "'<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P1::C1'), '2019-03-05') == ( '<EMAIL>', 'ef',", "terms of the Mozilla Public # License, v. 2.0. 
If", "'2019-02-28': 'A B', '2019-03-07': 'C D', }, } people =", "'default': { 'doc': 'All the dates are the duty end", "{'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-10') == {'<EMAIL>': ['']} with patch.object(RoundRobin, 'is_mozilla',", "You can obtain one at http://mozilla.org/MPL/2.0/. import unittest from mock", "the MPL was not distributed with this file, # You", "'<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-28') == ( '<EMAIL>', 'cd',", "bzmail): return bzmail.split('@')[0] def test_get(self): with patch.object(RoundRobin, 'get_nick', new=TestRoundRobin._get_nick): rr", "dates are the duty end dates.', '2019-02-21': 'A B', '2019-02-28':", "'special': { 'doc': 'All the dates are the duty end", "( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P2::C2'), '2019-03-05') == ( '<EMAIL>',", "return bzmail.split('@')[0] def test_get(self): with patch.object(RoundRobin, 'get_nick', new=TestRoundRobin._get_nick): rr =", "BadFallback, RoundRobin class TestRoundRobin(unittest.TestCase): config = { 'doc': 'The triagers", "assert rr.get(self.mk_bug('P1::C1'), '2019-03-08') == ( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('P2::C2'),", "'cn': 'G H', 'ismanager': 'FALSE', 'title': 'nothing', } ] )", "'<EMAIL>', 'triage_owner_detail': {'nick': 'ij'}, } @staticmethod def _get_nick(x, bzmail): return", "http://mozilla.org/MPL/2.0/. 
import unittest from mock import patch from auto_nag.people import", "( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-28') == ( '<EMAIL>',", "rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) self.assertRaises(BadFallback, rr.get_who_to_nag, '2019-03-01')", "assert rr.get(self.mk_bug('P1::C1'), '2019-02-28') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P2::C2'),", "{ 'A B': {'bzmail': '<EMAIL>'}, 'C D': {'bzmail': '<EMAIL>'}, 'E", "'<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('P2::C2'), '2019-03-08') == ( '<EMAIL>', 'gh',", "'2019-03-01') == ( '<EMAIL>', 'ij', ) def test_get_who_to_nag(self): rr =", "return_value=False): rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) self.assertRaises(BadFallback, rr.get_who_to_nag,", "unittest from mock import patch from auto_nag.people import People from", ") assert rr.get(self.mk_bug('Foo::Bar'), '2019-03-01') == ( '<EMAIL>', 'ij', ) def", "== ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-28') == (", "rr.get(self.mk_bug('P1::C1'), '2019-03-05') == ( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P2::C2'), '2019-03-05')", "'2019-02-17') == ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-17') ==", "= People( [ { 'mail': '<EMAIL>', 'cn': 'G H', 'ismanager':", "rr.get_who_to_nag('2019-02-25') == {} assert rr.get_who_to_nag('2019-02-28') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-05')", "rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) assert rr.get_who_to_nag('2019-02-25') ==", "# License, v. 2.0. 
If a copy of the MPL", "( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-17') == ( '<EMAIL>',", ") assert rr.get(self.mk_bug('P2::C2'), '2019-03-05') == ( '<EMAIL>', 'ef', ) assert", "test_get_who_to_nag(self): rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) assert rr.get_who_to_nag('2019-02-25')", "= RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-17') ==", "def mk_bug(self, pc): p, c = pc.split('::') return { 'product':", "'C D', }, } people = People( [ { 'mail':", "D', '2019-03-07': 'E F', }, 'special': { 'doc': 'All the", "'2019-02-21': 'E F', '2019-02-28': 'A B', '2019-03-07': 'C D', },", "assert rr.get(self.mk_bug('P3::C3'), '2019-02-24') == ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P1::C1'),", "'ab', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-17') == ( '<EMAIL>', 'ef', )", "c = pc.split('::') return { 'product': p, 'component': c, 'triage_owner':", "{'<EMAIL>': ['']} with patch.object(RoundRobin, 'is_mozilla', return_value=False): rr = RoundRobin( rr={'team':", "are the duty end dates.', '2019-02-21': 'E F', '2019-02-28': 'A", "Mozilla Public # License, v. 2.0. 
If a copy of", "] ) def mk_bug(self, pc): p, c = pc.split('::') return", "'P3::C3': 'special'}, 'default': { 'doc': 'All the dates are the", ") assert rr.get(self.mk_bug('P3::C3'), '2019-02-24') == ( '<EMAIL>', 'ab', ) assert", "_get_nick(x, bzmail): return bzmail.split('@')[0] def test_get(self): with patch.object(RoundRobin, 'get_nick', new=TestRoundRobin._get_nick):", "'2019-03-05') == ( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P3::C3'), '2019-03-05') ==", ") assert rr.get(self.mk_bug('P2::C2'), '2019-02-17') == ( '<EMAIL>', 'ab', ) assert", "'triagers': { 'A B': {'bzmail': '<EMAIL>'}, 'C D': {'bzmail': '<EMAIL>'},", "'cd', ) assert rr.get(self.mk_bug('P1::C1'), '2019-03-08') == ( '<EMAIL>', 'gh', )", "TestRoundRobin(unittest.TestCase): config = { 'doc': 'The triagers need to have", "entry.', 'triagers': { 'A B': {'bzmail': '<EMAIL>'}, 'C D': {'bzmail':", ") assert rr.get_who_to_nag('2019-02-25') == {} assert rr.get_who_to_nag('2019-02-28') == {'<EMAIL>': ['']}", "== {} assert rr.get_who_to_nag('2019-02-28') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-05') ==", "}, 'special': { 'doc': 'All the dates are the duty", "== ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-17') == (", "== ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P1::C1'), '2019-03-08') == (", "'All the dates are the duty end dates.', '2019-02-21': 'A", "pc.split('::') return { 'product': p, 'component': c, 'triage_owner': '<EMAIL>', 'triage_owner_detail':", "'2019-02-24') == ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-28') ==", "new=TestRoundRobin._get_nick): rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) assert rr.get(self.mk_bug('P1::C1'),", "rr.get(self.mk_bug('P2::C2'), '2019-03-08') == ( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('P3::C3'), '2019-03-08')", "'<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-24') == ( '<EMAIL>', 'cd',", "{'bzmail': '<EMAIL>'}, 'C D': {'bzmail': 
'<EMAIL>'}, 'E F': {'bzmail': '<EMAIL>'},", "== ( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('P2::C2'), '2019-03-08') == (", ") assert rr.get(self.mk_bug('P1::C1'), '2019-03-05') == ( '<EMAIL>', 'ef', ) assert", "is subject to the terms of the Mozilla Public #", "rr.get(self.mk_bug('P3::C3'), '2019-03-08') == ( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('Foo::Bar'), '2019-03-01')", "'<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-24') == ( '<EMAIL>', 'ab',", "the terms of the Mozilla Public # License, v. 2.0.", "rr.get_who_to_nag('2019-03-05') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-07') == {'<EMAIL>': ['']} assert", "config = { 'doc': 'The triagers need to have a", "c, 'triage_owner': '<EMAIL>', 'triage_owner_detail': {'nick': 'ij'}, } @staticmethod def _get_nick(x,", "'mail': '<EMAIL>', 'cn': 'G H', 'ismanager': 'FALSE', 'title': 'nothing', }", "Public # License, v. 2.0. If a copy of the", "== {'<EMAIL>': ['']} with patch.object(RoundRobin, 'is_mozilla', return_value=False): rr = RoundRobin(", "'<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P1::C1'), '2019-03-08') == ( '<EMAIL>', 'gh',", "RoundRobin class TestRoundRobin(unittest.TestCase): config = { 'doc': 'The triagers need", "== ( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P2::C2'), '2019-03-05') == (", "assert rr.get(self.mk_bug('P1::C1'), '2019-03-05') == ( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P2::C2'),", "the duty end dates.', '2019-02-21': 'A B', '2019-02-28': 'C D',", "assert rr.get_who_to_nag('2019-03-10') == {'<EMAIL>': ['']} with patch.object(RoundRobin, 'is_mozilla', return_value=False): rr", "'<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-24') == ( '<EMAIL>', 'cd',", "rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-17') == ( '<EMAIL>',", "assert rr.get(self.mk_bug('P2::C2'), '2019-03-05') == ( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P3::C3'),", 
"TestRoundRobin.config}, people=TestRoundRobin.people ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-17') == ( '<EMAIL>', 'ab',", "'ef', ) assert rr.get(self.mk_bug('P3::C3'), '2019-03-05') == ( '<EMAIL>', 'cd', )", "B', '2019-02-28': 'C D', '2019-03-07': 'E F', }, 'special': {", "'2019-02-17') == ( '<EMAIL>', 'ab', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-17') ==", "are the duty end dates.', '2019-02-21': 'A B', '2019-02-28': 'C", "{ 'doc': 'The triagers need to have a \\'Fallback\\' entry.',", "people=TestRoundRobin.people ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-17') == ( '<EMAIL>', 'ab', )", ") assert rr.get(self.mk_bug('P3::C3'), '2019-02-17') == ( '<EMAIL>', 'ef', ) assert", "== ( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('P3::C3'), '2019-03-08') == (", "Source Code Form is subject to the terms of the", "utf-8 # This Source Code Form is subject to the", "['']} assert rr.get_who_to_nag('2019-03-05') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-07') == {'<EMAIL>':", "duty end dates.', '2019-02-21': 'A B', '2019-02-28': 'C D', '2019-03-07':", "'2019-02-24') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-24') ==", "'A B', '2019-03-07': 'C D', }, } people = People(", "'2019-02-28') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-28') ==", ") def test_get_who_to_nag(self): rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people )", "not distributed with this file, # You can obtain one", "'component': c, 'triage_owner': '<EMAIL>', 'triage_owner_detail': {'nick': 'ij'}, } @staticmethod def", "'2019-03-08') == ( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('P2::C2'), '2019-03-08') ==", "assert rr.get(self.mk_bug('P1::C1'), '2019-02-24') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P2::C2'),", "== {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-10') == {'<EMAIL>': ['']} with patch.object(RoundRobin,", "F', '2019-02-28': 'A B', '2019-03-07': 'C D', }, } people", 
"{ 'product': p, 'component': c, 'triage_owner': '<EMAIL>', 'triage_owner_detail': {'nick': 'ij'},", "== ( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P3::C3'), '2019-03-05') == (", "the dates are the duty end dates.', '2019-02-21': 'A B',", "'triage_owner_detail': {'nick': 'ij'}, } @staticmethod def _get_nick(x, bzmail): return bzmail.split('@')[0]", "'<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('Foo::Bar'), '2019-03-01') == ( '<EMAIL>', 'ij',", "== ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-24') == (", "'2019-02-28') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P3::C3'), '2019-02-28') ==", "== ( '<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P1::C1'), '2019-02-24') == (", "'2019-02-24') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-24') ==", "== ( '<EMAIL>', 'gh', ) assert rr.get(self.mk_bug('Foo::Bar'), '2019-03-01') == (", "duty end dates.', '2019-02-21': 'E F', '2019-02-28': 'A B', '2019-03-07':", "people=TestRoundRobin.people ) assert rr.get_who_to_nag('2019-02-25') == {} assert rr.get_who_to_nag('2019-02-28') == {'<EMAIL>':", "'cd', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-28') == ( '<EMAIL>', 'cd', )", "'FALSE', 'title': 'nothing', } ] ) def mk_bug(self, pc): p,", "rr.get(self.mk_bug('P1::C1'), '2019-02-28') == ( '<EMAIL>', 'cd', ) assert rr.get(self.mk_bug('P2::C2'), '2019-02-28')", "'C D', '2019-03-07': 'E F', }, 'special': { 'doc': 'All", "RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people ) assert rr.get_who_to_nag('2019-02-25') == {} assert", "dates.', '2019-02-21': 'E F', '2019-02-28': 'A B', '2019-03-07': 'C D',", "== {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-07') == {'<EMAIL>': ['']} assert rr.get_who_to_nag('2019-03-10')", "D', }, } people = People( [ { 'mail': '<EMAIL>',", "TestRoundRobin.config}, people=TestRoundRobin.people ) assert rr.get_who_to_nag('2019-02-25') == {} assert rr.get_who_to_nag('2019-02-28') ==", "'<EMAIL>', 'ab', ) assert 
rr.get(self.mk_bug('P3::C3'), '2019-02-17') == ( '<EMAIL>', 'ef',", "'<EMAIL>', 'ef', ) assert rr.get(self.mk_bug('P3::C3'), '2019-03-05') == ( '<EMAIL>', 'cd',", "'A B', '2019-02-28': 'C D', '2019-03-07': 'E F', }, 'special':", "'C D': {'bzmail': '<EMAIL>'}, 'E F': {'bzmail': '<EMAIL>'}, 'Fallback': {'bzmail':", "'<EMAIL>', 'ij', ) def test_get_who_to_nag(self): rr = RoundRobin( rr={'team': TestRoundRobin.config},", "with patch.object(RoundRobin, 'get_nick', new=TestRoundRobin._get_nick): rr = RoundRobin( rr={'team': TestRoundRobin.config}, people=TestRoundRobin.people", "} people = People( [ { 'mail': '<EMAIL>', 'cn': 'G", ") assert rr.get(self.mk_bug('P1::C1'), '2019-02-24') == ( '<EMAIL>', 'cd', ) assert" ]
[ "\" if(!(PyObject*)return_val && !exception_occurred)\\n\" \\ \" {\\n \\n\" \\ \"", "with (e.g. object files not implied by 'sources', static libraries", "\\ '#endif\\n' \\ ' raw_locals = py_to_raw_dict(py__locals,\"_locals\");\\n' \\ ' raw_globals", "\\n\" \\ \"} \\n\" return_code = \" /* cleanup code", "return ext_func.function_code() def compile_function(code,arg_names,local_dict,global_dict, module_dir, compiler='', verbose=1, support_code=None, headers=[], customize=None,", "PyObject* raw_locals;\\n' \\ ' PyObject* raw_globals;\\n' \\ '#endif\\n' \\ '", "to Python in the `return_val`. arg_names : [str], optional A", "return the function. Make sure # the directory where it", "when linking object files together to create the extension (or", "pass to SWIG if a source file has the .i", "' = '.join(self.arg_specs.py_variables()) if py_vars: init_values = py_vars + '", "def python_function_definition_code(self): args = (self.name, self.name) function_decls = '{\"%s\",(PyCFunction)%s ,", "sources : [string] List of source filenames, relative to the", "in headers: mod.customize.add_header(header) # it's nice to let the users", "= global_dict.get('__file__',None) function_list = function_catalog.get_functions(code,module_dir) for func in function_list: try:", "in the type conversions section of the main documentation for", "type double or double complex, all variables maintain their standard", "# if we get here, the function wasn't found raise", "<NAME>'s :class:`distutils.extension.Extension` class for convenience: sources : [string] List of", "return_val. Also, the contents of mutable objects can be changed", "in here. # the PYTHONCOMPILED environment variable offers the most", "the C/C++ code. It defaults to an empty string. local_dict", "force : {0, 1}, optional If 1, the C++ code", "from . 
import build_tools compiler = build_tools.choose_compiler(compiler) ext_func.set_compiler(compiler) return ext_func.function_code()", "you'd like to use a different set of type conversions", "\\ \" exception_occurred = 1; \\n\" \\ \"} \\n\" return_code", "\"command line\" makes sense, this is typically a list of", "lives is in the python path. try: sys.path.insert(0,storage_dir) exec('import '", "platforms it could be anything. extra_link_args : [string] Any extra", "default, specify them here. Look in the type conversions section", "The name of compiler to use when compiling. On windows,", "arrays as input variables. Setting this to 1 will cause", "it's nice to let the users know when anything gets", "PyObject* %s(PyObject*self, PyObject* args)\\n{\\n' return code % self.name def parse_tuple_code(self):", ": [string] List of macros to undefine explicitly. library_dirs :", "to SWIG if a source file has the .i extension.", "NameError(msg) except KeyError: pass # 2. try catalog cache. function_list", "undefine explicitly. library_dirs : [string] List of directories to search", "have its own error type, instead of # checking the", "+ \\ init_values + parse_tuple def arg_declaration_code(self): \"\"\"Return the declaration", "is typically a list of command-line arguments, but for other", "section of the main documentation for examples. auto_downcast : {1,0},", "line\" makes sense, this is typically a list of command-line", "arguments, but for other platforms it could be anything. extra_link_args", "that yet. msg = str(msg) if msg[:16] == \"Conversion Error\":", "link time. libraries : [string] List of library names (not", "to create the local dict as a string.\"\"\" arg_strings =", "code = ndarray_api_version + '\\n' + code module_path = function_catalog.unique_module_name(code,", "compiled function. 
This could be declarations of functions, classes, or", "= NULL;\\n' \\ 'PyObject *py__globals = NULL;\\n' py_objects = ',", "-- that is the locals from the function that called", "List of macros to undefine explicitly. library_dirs : [string] List", "this isn't available, it looks for mingw32 (the gcc compiler).", "undef_macros : [string] List of macros to undefine explicitly. library_dirs", "in changed locals and globals here...*/ \\n' \\ ' }\\n'", "2. try catalog cache. function_list = function_catalog.get_functions_fast(code) for func in", "return \"\".join(arg_strings) def arg_local_dict_code(self): \"\"\"Return the code to create the", "command as source for a Python extension. .. note:: The", "files together to create the extension (or to create a", "returned from the C/C++ code through a special argument called", "create the extension (or to create a new static Python", "to the module. for header in headers: mod.customize.add_header(header) # it's", "py_vars = ' = '.join(self.arg_specs.py_variables()) if py_vars: init_values = py_vars", "libraries : [string] List of library names (not filenames or", "(I should add 'gcc' though to this). On windows, the", "even one of the arrays has type double or double", "declarations for all PyObjects are done also. This code got", "types. newarr_converter : int, optional Unused. Other Parameters ---------------- Relevant", "arg_names : [str], optional A list of Python variable names", "be used much). type_converters : [type converters], optional These guys", "to the C/C++ code by assignment much like variables passed", "on the fly. Variables in the local and global Python", "are passed through to distutils mod.compile(location=storage_dir,compiler=compiler, verbose=verbose, **kw) # import", "= func return results except TypeError as msg: # should", "where it lives is in the python path. 
try: sys.path.insert(0,storage_dir)", "py_objects = ', '.join(self.arg_specs.py_pointers()) if py_objects: declare_py_objects = 'PyObject '", "for the compilation process and can be useful if your", "global dictionary of the calling function is used. force :", "return results except: # should specify argument types here. pass", "to an empty string. local_dict : dict, optional If specified,", "complex, all variables maintain their standard types. newarr_converter : int,", "portability). define_macros : [(name : string, value : string|None)] List", "py::object(); \\n\" \\ \" exception_occurred = 1; \\n\" \\ \"}", ": str, optional The name of compiler to use when", "the users know when anything gets compiled, as the #", "code exits and returns to Python. inline has quite a", "is used. force : {0, 1}, optional If 1, the", "needed for inline extension functions def function_declaration_code(self): code = 'static", "exec('import ' + module_name) func = eval(module_name+'.compiled_func') finally: del sys.path[0]", "\"{ \\n\" + \\ \" return_val = py::object(); \\n\" \\", "(except on windows with msvc where it still prints some", "PyObject* raw_globals;\\n' \\ '#endif\\n' \\ ' raw_locals = py_to_raw_dict(py__locals,\"_locals\");\\n' \\", "% args return function_decls class inline_ext_module(ext_tools.ext_module): def __init__(self,name,compiler=''): ext_tools.ext_module.__init__(self,name,compiler) self._build_information.append(common_info.inline_info())", "inline(code,arg_names=[],local_dict=None, global_dict=None, force=0, compiler='', verbose=0, support_code=None, headers=[], customize=None, type_converters=None, auto_downcast=1,", "of directories to search for C/C++ header files (in Unix", "sense, this is typically a list of command-line arguments, but", "These are duplicated from <NAME>'s :class:`distutils.extension.Extension` class for convenience: sources", "additional parameters. \"\"\" # this grabs the local variables from", "or structures. 
headers : [str], optional A list of strings", "ValueError('function with correct signature not found') def inline_function_code(code,arg_names,local_dict=None, global_dict=None,auto_downcast=1, type_converters=None,compiler=''):", "code within Python scripts. ``inline()`` compiles and executes C/C++ code", "only useful if your editing `support_code` a lot. compiler :", "1 informs you when compiling starts, finishes, and how long", "msg[:16] == \"Conversion Error\": pass else: raise NameError(msg) # 3.", "variables from the *previous* call # frame -- that is", "except: # should specify argument types here. pass # if", "frame -- that is the locals from the function that", "customize=customize, type_converters=type_converters, auto_downcast=auto_downcast, **kw) function_catalog.add_function(code,func,module_dir) results = attempt_function_call(code,local_dict,global_dict) else: #", "`module_path` file is always appended to the front of this", "object files not implied by 'sources', static libraries that must", "we get here, the function wasn't found raise ValueError('function with", "for arg in self.arg_specs] return \"\".join(arg_strings) def arg_local_dict_code(self): \"\"\"Return the", "\\n' \\ ' { \\n' \\ '#if defined(__GNUC__) || defined(__ICC)\\n'", "compiler to use when compiling. On windows, it understands 'msvc'", "\"\"\" # this grabs the local variables from the *previous*", "special argument called return_val. Also, the contents of mutable objects", "catalog module_dir = global_dict.get('__file__',None) function_list = function_catalog.get_functions(code,module_dir) for func in", "if we get here, the function wasn't found raise ValueError('function", "distribution root (where the setup script lives), in Unix form", "returns to Python. 
inline has quite a few options as", "be pasted at the end of a ``#include`` statement in", "Any extra options to pass to SWIG if a source", "correct signature not found') def inline_function_code(code,arg_names,local_dict=None, global_dict=None,auto_downcast=1, type_converters=None,compiler=''): call_frame =", "inline is called. This is really only useful for debugging,", "define it to or None to define it without a", "try_code + \\ indent(catch_code,4) + \\ return_code return all_code def", "on. language : string Extension language (i.e. \"c\", \"c++\", \"objc\").", "script lives), in Unix form (slash-separated) for portability. Source files", "\\ cleanup_code + \\ \" if(!(PyObject*)return_val && !exception_occurred)\\n\" \\ \"", "# that will contain the function. # storage_dir = catalog.intermediate_dir()", "code\" needed by the function to the module. if support_code:", "the compilation process and can be useful if your having", "headers : [str], optional A list of strings specifying header", "own error type, instead of # checking the beginning of", "# add the extra headers needed by the function to", "= 'py::object return_val;\\n' \\ 'int exception_occurred = 0;\\n' \\ 'PyObject", "On Unix, it'll probably use the same compiler that was", "= [arg.cleanup_code() for arg in self.arg_specs] return \"\".join(arg_strings) def arg_local_dict_code(self):", "Inline C/C++ code within Python scripts. ``inline()`` compiles and executes", "accepted to specify extra information needed for compiling. Parameters ----------", "}\\n \\n\" \\ \" return return_val.disown(); \\n\" \\ \"} \\n\"", "run time (for shared extensions, this is when the extension", "dict as a string.\"\"\" arg_strings = [arg.local_dict_code() for arg in", "dictionary of the calling function is used. global_dict : dict,", "custom_info object), then set the module customization. 
if customize: mod.customize", "compile the library module_dir = global_dict.get('__file__',None) func = compile_function(code,arg_names,local_dict, global_dict,module_dir,", "(in Unix form for portability). define_macros : [(name : string,", "PyObject* %s(PyObject*self, PyObject* args)\\n{\\n' return code % self.name def template_declaration_code(self):", "is None: global_dict = call_frame.f_globals ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters)", "ext_tools from . import catalog from . import common_info from", "the compile phase of inlining code. 0 is silent (except", "' PyObject* raw_globals __attribute__ ((unused));\\n' \\ '#else\\n' \\ ' PyObject*", "\\ ' PyObject* raw_locals;\\n' \\ ' PyObject* raw_globals;\\n' \\ '#endif\\n'", "(.i), platform-specific resource files, or whatever else is recognized by", "locals and globals here...*/ \\n' \\ ' }\\n' catch_code =", "global_dict is None: global_dict = call_frame.f_globals ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast,", ": [string] List of extra files to link with (e.g.", "= inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) from . import build_tools compiler =", "the function to the module. if support_code: mod.customize.add_support_code(support_code) # add", "data types. If you'd like to use a different set", "the locals from the function that called # inline. global", "and not generally necessary for Python extensions, which typically export", "variable offers the most hope. function_catalog = catalog.catalog() class inline_ext_function(ext_tools.ext_function):", "*py__globals = NULL;\\n' py_objects = ', '.join(self.arg_specs.py_pointers()) if py_objects: declare_py_objects", "C++ code. 
customize : base_info.custom_info, optional An alternative way to", ": string, value : string|None)] List of macros to define;", "(i.e. \"c\", \"c++\", \"objc\"). Will be detected from the source", "C/C++ code. If local_dict is not specified the local dictionary", "where to store and what to name the extension module", "passed to the C/C++ code by assignment much like variables", "for other platforms it could be anything. extra_link_args : [string]", "global_dict=None,auto_downcast=1, type_converters=None,compiler=''): call_frame = sys._getframe().f_back if local_dict is None: local_dict", "the same compiler that was used when compiling Python. Cygwin's", "defaults to the Microsoft C++ compiler. If this isn't available,", "correct location, with the given compiler and verbosity # setting.", "from Python into the C/C++ code. It defaults to an", "except ValueError: # compile the library module_dir = global_dict.get('__file__',None) func", "Python interpreter). Similar interpretation as for 'extra_compile_args'. export_symbols : [string]", "and global dict # as input. from __future__ import absolute_import,", "of functions, classes, or structures. headers : [str], optional A", "through a special argument called return_val. Also, the contents of", "+ parse_tuple def arg_declaration_code(self): \"\"\"Return the declaration code as a", "be similar. verbose : {0,1,2}, optional Specifies how much information", "than the default, specify them here. Look in the type", "libraries that must be explicitly specified, binary resource files, etc.)", "export_symbols : [string] List of symbols to be exported from", "types to C/C++ data types. If you'd like to use", "setup script lives), in Unix form (slash-separated) for portability. Source", "types here. # This should really have its own error", "by distutils. On Unix, it'll only understand the values understood", "filenames, relative to the distribution root (where the setup script", "for portability). 
define_macros : [(name : string, value : string|None)]", "\\ ' /*I would like to fill in changed locals", "to use when compiling the code. The list might look", "quite a few options as listed below. Also, the keyword", "looks for mingw32 (the gcc compiler). On Unix, it'll probably", "FOO\" in source or -DFOO on Unix C compiler command", "objects can be changed within the C/C++ code and the", "global_dict is None: global_dict = call_frame.f_globals if force: module_dir =", "try \\n' \\ ' { \\n' \\ '#if defined(__GNUC__) ||", "print('<weave: compiling>') # compile code in correct location, with the", "numpy.core.multiarray import _get_ndarray_c_version ndarray_api_version = '/* NDARRAY API VERSION %x", "customize : base_info.custom_info, optional An alternative way to specify `support_code`,", "% self.name def template_declaration_code(self): code = 'template<class T>\\n' \\ 'static", "isn't available, it looks for mingw32 (the gcc compiler). On", "like to fill in changed locals and globals here...*/ \\n'", "behavior should be similar. verbose : {0,1,2}, optional Specifies how", "to specify `support_code`, `headers`, etc. needed by the function. See", "to store and what to name the extension module #", "= NULL;\\n' py_objects = ', '.join(self.arg_specs.py_pointers()) if py_objects: declare_py_objects =", "msvc where it still prints some garbage). 1 informs you", "func in function_list: try: results = apply(func,(local_dict,global_dict)) function_catalog.fast_cache(code,func) function_cache[code] =", "platform-specific resource files, or whatever else is recognized by the", "except NameError as msg: msg = str(msg).strip() if msg[:16] ==", "';\\n' else: declare_py_objects = '' py_vars = ' = '.join(self.arg_specs.py_variables())", "\\ '#if defined(__GNUC__) || defined(__ICC)\\n' \\ ' PyObject* raw_locals __attribute__", "# 2. 
try function catalog try: results = attempt_function_call(code,local_dict,global_dict) #", "to the distribution root (where the setup script lives), in", "loaded). extra_objects : [string] List of extra files to link", "+ function_code + \\ ' /*I would like to fill", "dictionary of values that should be used as the global", "for arg in self.arg_specs] return \"\".join(arg_strings) def function_code(self): from .ext_tools", "values understood by distutils. (I should add 'gcc' though to", "in the C/C++ code. Values are passed to the C/C++", "necessary for Python extensions, which typically export exactly one symbol:", "C/C++ code by assignment much like variables passed are passed", "'int exception_occurred = 0;\\n' \\ 'PyObject *py__locals = NULL;\\n' \\", "main documentation for examples. auto_downcast : {1,0}, optional This only", "PyObject* raw_locals __attribute__ ((unused));\\n' \\ ' PyObject* raw_globals __attribute__ ((unused));\\n'", "inline_ext_function(ext_tools.ext_function): # Some specialization is needed for inline extension functions", "to link against. runtime_library_dirs : [string] List of directories to", "def function_declaration_code(self): code = 'static PyObject* %s(PyObject*self, PyObject* args)\\n{\\n' return", "type_converters=type_converters, auto_downcast=auto_downcast, **kw) function_catalog.add_function(code,func,module_dir) results = attempt_function_call(code,local_dict,global_dict) return results def", "string|None)] List of macros to define; each macro is defined", "if your having problems getting code to work. Its handy", "# import the module and return the function. Make sure", "changed within the C/C++ code and the changes remain after", "lot. compiler : str, optional The name of compiler to", "return code % self.name def template_declaration_code(self): code = 'template<class T>\\n'", "(self.name, self.name) function_decls = '{\"%s\",(PyCFunction)%s , METH_VARARGS},\\n' % args return", "path. 
try: sys.path.insert(0,storage_dir) exec('import ' + module_name) func = eval(module_name+'.compiled_func')", "to create the extension (or to create a new static", "ext_func.function_code() def compile_function(code,arg_names,local_dict,global_dict, module_dir, compiler='', verbose=1, support_code=None, headers=[], customize=None, type_converters=None,", "indent(self.arg_local_dict_code(),4) try_code = \\ ' try \\n' \\ ' {", "compilation process and can be useful if your having problems", "C/C++ code within Python scripts. ``inline()`` compiles and executes C/C++", "arg in self.arg_specs] return \"\".join(arg_strings) def function_code(self): from .ext_tools import", "cleanup code */ \\n\" + \\ cleanup_code + \\ \"", "compiling starts, finishes, and how long it took. 2 prints", "when compiling Python. Cygwin's behavior should be similar. verbose :", "code % self.name def template_declaration_code(self): code = 'template<class T>\\n' \\", "compiler-specific information to use when compiling the source files in", "Source files may be C, C++, SWIG (.i), platform-specific resource", "**kw): \"\"\" Inline C/C++ code within Python scripts. ``inline()`` compiles", "here, the function wasn't found raise ValueError('function with correct signature", "\"support code\" needed by the function to the module. if", "should assign results that need to be returned to Python", "raise NameError(msg) except KeyError: pass # 2. try function catalog", "# global function_catalog # 1. try local cache try: results", "root (where the setup script lives), in Unix form (slash-separated)", "# should specify argument types here. # This should really", "the keyword arguments for distutils extension modules are accepted to", "specify a return statement. Instead it should assign results that", "that called # inline. global function_catalog call_frame = sys._getframe().f_back if", "(not sure this'll be used much). type_converters : [type converters],", "Python into the C/C++ code. 
It defaults to an empty", "the calling function is used. force : {0, 1}, optional", "`support_code` a lot. compiler : str, optional The name of", "'static PyObject* %s(PyObject*self, PyObject* args)\\n{\\n' return code % self.name def", "\\n' \\ + function_code + \\ ' /*I would like", "functions that have numpy arrays as input variables. Setting this", "C/C++ code. It defaults to an empty string. local_dict :", "else: raise NameError(msg) # 3. try persistent catalog module_dir =", "or -DFOO on Unix C compiler command line). undef_macros :", "\\ try_code + \\ indent(catch_code,4) + \\ return_code return all_code", "|| defined(__ICC)\\n' \\ ' PyObject* raw_locals __attribute__ ((unused));\\n' \\ '", "header files to use when compiling the code. The list", "that have numpy arrays as input variables. Setting this to", "%s(PyObject*self, PyObject* args)\\n{\\n' return code % self.name def parse_tuple_code(self): \"\"\"", "\\ 'int exception_occurred = 0;\\n' \\ 'PyObject *py__locals = NULL;\\n'", "a string.\"\"\" arg_strings = [arg.local_dict_code() for arg in self.arg_specs] return", "extension functions def function_declaration_code(self): code = 'static PyObject* %s(PyObject*self, PyObject*", "language (i.e. \"c\", \"c++\", \"objc\"). Will be detected from the", "if global_dict is None: global_dict = call_frame.f_globals if force: module_dir", "like variables passed are passed into a standard Python function.", "[(name : string, value : string|None)] List of macros to", "results = attempt_function_call(code,local_dict,global_dict) return results def attempt_function_call(code,local_dict,global_dict): # we try", "the changes remain after the C code exits and returns", "is the locals from the function that called # inline.", "should not specify a return statement. Instead it should assign", "2. 
try function catalog try: results = attempt_function_call(code,local_dict,global_dict) # 3.", "and globals here...*/ \\n' \\ ' }\\n' catch_code = \"catch(...)", "' raw_globals = py_to_raw_dict(py__globals,\"_globals\");\\n' \\ ' /* argument conversion code", "values that should be used as the global scope for", "the function except ValueError: # compile the library module_dir =", "a standard Python function. Values are returned from the C/C++", "Also, the contents of mutable objects can be changed within", "your compiled function. This could be declarations of functions, classes,", "code got a lot uglier when I added local_dict... \"\"\"", "to fill in changed locals and globals here...*/ \\n' \\", "0 is silent (except on windows with msvc where it", "handy for finding the name of the .cpp file if", "need to examine it. verbose has no effect if the", "Values are returned from the C/C++ code through a special", "# inline. global function_catalog call_frame = sys._getframe().f_back if local_dict is", "by your compiled function. This could be declarations of functions,", "global_dict.get('__file__',None) function_list = function_catalog.get_functions(code,module_dir) for func in function_list: try: results", "verbose : {0,1,2}, optional Specifies how much information is printed", "macro is defined using a 2-tuple, where 'value' is either", "3 levels here -- a local cache first, then the", "NULL;\\n\\n' else: init_values = '' parse_tuple = 'if(!PyArg_ParseTuple(args,\"OO:compiled_func\",'\\ '&py__locals,'\\ '&py__globals))\\n'\\", "/*I would like to fill in changed locals and globals", "how to define that yet. msg = str(msg) if msg[:16]", "exactly one symbol: \"init\" + extension_name. 
swig_opts : [string] Any", "= global_dict.get('__file__',None) func = compile_function(code,arg_names,local_dict, global_dict,module_dir, compiler=compiler, verbose=verbose, support_code=support_code, headers=headers,", "try_code = \\ ' try \\n' \\ ' { \\n'", "use the same compiler that was used when compiling Python.", "None: global_dict = call_frame.f_globals ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) from", "to pass to SWIG if a source file has the", "of type float. If even one of the arrays has", "PyObject* args)\\n{\\n' return code % self.name def template_declaration_code(self): code =", "a local and global dict # as input. from __future__", "well as all the compiler names understood by distutils. On", "time inline is called. This is really only useful for", "**kw) function_catalog.add_function(code,func,module_dir) results = attempt_function_call(code,local_dict,global_dict) else: # 1. try local", "Cygwin's behavior should be similar. verbose : {0,1,2}, optional Specifies", "to define that yet. msg = str(msg) if msg[:16] ==", "if py_objects: declare_py_objects = 'PyObject ' + py_objects + ';\\n'", "# 2. try catalog cache. function_list = function_catalog.get_functions_fast(code) for func", "explicitly specified, binary resource files, etc.) extra_compile_args : [string] Any", "may be C, C++, SWIG (.i), platform-specific resource files, or", "together to create the extension (or to create a new", "# how to define that yet. msg = str(msg) if", "If specified, it is a dictionary of values that should", "= '{\"%s\",(PyCFunction)%s , METH_VARARGS},\\n' % args return function_decls class inline_ext_module(ext_tools.ext_module):", "support_code=None, headers=[], customize=None, type_converters=None, auto_downcast=1, **kw): # figure out where", "dict # as input. 
from __future__ import absolute_import, print_function import", "\"} \\n\" all_code = self.function_declaration_code() + \\ indent(self.parse_tuple_code(),4) + \\", "+ \\ \" return_val = py::object(); \\n\" \\ \" exception_occurred", "be needed by your compiled function. This could be declarations", "strings need to be in a form than can be", "func return results except TypeError as msg: # should specify", "the module customization. if customize: mod.customize = customize # add", "here. # This should really have its own error type,", "[string] List of macros to undefine explicitly. library_dirs : [string]", "on all platforms, and not generally necessary for Python extensions,", "convenience: sources : [string] List of source filenames, relative to", "either the string to define it to or None to", "the calling function is used. global_dict : dict, optional If", "files (in Unix form for portability). define_macros : [(name :", "distutils. On Unix, it'll only understand the values understood by", "for convenience: sources : [string] List of source filenames, relative", "catalog cache, and then persistent catalog. # global function_catalog #", "= attempt_function_call(code,local_dict,global_dict) # 3. build the function except ValueError: #", "'PyObject *py__locals = NULL;\\n' \\ 'PyObject *py__globals = NULL;\\n' py_objects", "apply(func,(local_dict,global_dict)) function_catalog.fast_cache(code,func) function_cache[code] = func return results except TypeError as", "to the Microsoft C++ compiler. If this isn't available, it", "files in 'sources'. For platforms and compilers where \"command line\"", "or double complex, all variables maintain their standard types. 
newarr_converter", "optional Specifies how much information is printed during the compile", "/* inline code */ \\n' \\ + function_code + \\", "declare_return = 'py::object return_val;\\n' \\ 'int exception_occurred = 0;\\n' \\", "= [arg.declaration_code(inline=1) for arg in self.arg_specs] return \"\".join(arg_strings) def arg_cleanup_code(self):", "setting. All input keywords are passed through to distutils mod.compile(location=storage_dir,compiler=compiler,", "function_code(self): from .ext_tools import indent decl_code = indent(self.arg_declaration_code(),4) cleanup_code =", "information needed for compiling. Parameters ---------- code : string A", "it is a dictionary of values that should be used", "msg: msg = str(msg).strip() if msg[:16] == \"Conversion Error\": pass", "auto_downcast=1, newarr_converter=0, **kw): \"\"\" Inline C/C++ code within Python scripts.", "# 1. try local cache try: results = apply(function_cache[code],(local_dict,global_dict)) return", "a particular value (equivalent of \"#define FOO\" in source or", "and returns to Python. inline has quite a few options", "if you need to examine it. verbose has no effect", "but I don't know # how to define that yet.", "symbol: \"init\" + extension_name. swig_opts : [string] Any extra options", "if customize: mod.customize = customize # add the extra \"support", "structures. headers : [str], optional A list of strings specifying", "This is really only useful for debugging, and probably only", "of macros to define; each macro is defined using a", "from __future__ import absolute_import, print_function import sys import os from", "attempt_function_call(code,local_dict,global_dict) # 3. build the function except ValueError: # compile", "1, the C++ code is compiled every time inline is", "to be exported from a shared extension. 
Not used on", "return function_decls class inline_ext_module(ext_tools.ext_module): def __init__(self,name,compiler=''): ext_tools.ext_module.__init__(self,name,compiler) self._build_information.append(common_info.inline_info()) function_cache =", ":mod:`scipy.weave.base_info` for more details. (not sure this'll be used much).", "probably use the same compiler that was used when compiling", "are also available in the C/C++ code. Values are passed", "needed by the function to the module. if support_code: mod.customize.add_support_code(support_code)", ": {1,0}, optional This only affects functions that have numpy", "be used as the global scope for the C/C++ code.", "string A string of valid C++ code. It should not", "\\ \" return return_val.disown(); \\n\" \\ \"} \\n\" all_code =", "type float. If even one of the arrays has type", "\"\".join(arg_strings) def arg_cleanup_code(self): \"\"\"Return the cleanup code as a string.\"\"\"", "\\ ' raw_locals = py_to_raw_dict(py__locals,\"_locals\");\\n' \\ ' raw_globals = py_to_raw_dict(py__globals,\"_globals\");\\n'", "in source or -DFOO on Unix C compiler command line).", "\\n\" \\ \" }\\n \\n\" \\ \" return return_val.disown(); \\n\"", "instead of double if all the Numeric arrays are of", "from . import common_info from numpy.core.multiarray import _get_ndarray_c_version ndarray_api_version =", "time. libraries : [string] List of library names (not filenames", "local_dict is None: local_dict = call_frame.f_locals if global_dict is None:", "= str(msg) if msg[:16] == \"Conversion Error\": pass else: raise", "executes C/C++ code on the fly. Variables in the local", "in the C++ code. customize : base_info.custom_info, optional An alternative", "compiled, as the # slowdown is very noticeable. if verbose", "to name the extension module # that will contain the", "it'll only understand the values understood by distutils. 
(I should", "if global_dict is None: global_dict = call_frame.f_globals ext_func = inline_ext_function('compiled_func',code,arg_names,", "much information is printed during the compile phase of inlining", "\"init\" + extension_name. swig_opts : [string] Any extra options to", "language : string Extension language (i.e. \"c\", \"c++\", \"objc\"). Will", "source extensions if not provided. See Also -------- distutils.extension.Extension :", "' }\\n' catch_code = \"catch(...) \\n\" \\ \"{ \\n\" +", "on Unix C compiler command line). undef_macros : [string] List", "indent decl_code = indent(self.arg_declaration_code(),4) cleanup_code = indent(self.arg_cleanup_code(),4) function_code = indent(self.code_block,4)", "= attempt_function_call(code,local_dict,global_dict) return results def attempt_function_call(code,local_dict,global_dict): # we try 3", "for arg in self.arg_specs] return \"\".join(arg_strings) def arg_cleanup_code(self): \"\"\"Return the", "the source extensions if not provided. See Also -------- distutils.extension.Extension", "def compile_function(code,arg_names,local_dict,global_dict, module_dir, compiler='', verbose=1, support_code=None, headers=[], customize=None, type_converters=None, auto_downcast=1,", "A string of valid C++ code declaring extra code that", "got a lot uglier when I added local_dict... \"\"\" declare_return", "list of strings specifying header files to use when compiling", "# setting. All input keywords are passed through to distutils", "def inline(code,arg_names=[],local_dict=None, global_dict=None, force=0, compiler='', verbose=0, support_code=None, headers=[], customize=None, type_converters=None,", "raise NameError(msg) except KeyError: pass # 2. try catalog cache.", "mod.customize.add_support_code(support_code) # add the extra headers needed by the function", "is when the extension is loaded). extra_objects : [string] List", "the extension is loaded). 
extra_objects : [string] List of extra", "function_cache[code] = func return results except: # should specify argument", "needed by the function. See :mod:`scipy.weave.base_info` for more details. (not", "else: raise NameError(msg) except KeyError: pass # 2. try function", "function_cache = {} def inline(code,arg_names=[],local_dict=None, global_dict=None, force=0, compiler='', verbose=0, support_code=None,", "# the directory where it lives is in the python", "took. 2 prints out the command lines for the compilation", "support_code=None, headers=[], customize=None, type_converters=None, auto_downcast=1, newarr_converter=0, **kw): \"\"\" Inline C/C++", "gcc compiler). On Unix, it'll probably use the same compiler", "directories to search for C/C++ header files (in Unix form", "paths) to link against. runtime_library_dirs : [string] List of directories", "I don't know # how to define that yet. msg", "if customize (a custom_info object), then set the module customization.", "this list include_dirs : [string] List of directories to search", "end of a ``#include`` statement in the C++ code. customize", "of valid C++ code. It should not specify a return", "\"build_ext\" command as source for a Python extension. .. note::", "the *previous* call # frame -- that is the locals", "static libraries that must be explicitly specified, binary resource files,", "module_name = os.path.split(module_path) mod = inline_ext_module(module_name,compiler) # create the function.", "form for portability). define_macros : [(name : string, value :", "is called. This is really only useful for debugging, and", "should specify argument types here. pass # if we get", "for examples. auto_downcast : {1,0}, optional This only affects functions", "empty string. 
local_dict : dict, optional If specified, it is", "% (_get_ndarray_c_version(),) # not an easy way for the user_path_list", "\\n' \\ + decl_code + \\ ' /* inline code", "different set of type conversions than the default, specify them", "compiled every time inline is called. This is really only", "the function to the module. for header in headers: mod.customize.add_header(header)", "if not provided. See Also -------- distutils.extension.Extension : Describes additional", "global_dict=None, force=0, compiler='', verbose=0, support_code=None, headers=[], customize=None, type_converters=None, auto_downcast=1, newarr_converter=0,", "the module and return the function. Make sure # the", "code by assignment much like variables passed are passed into", "within the C/C++ code and the changes remain after the", "function to the module. for header in headers: mod.customize.add_header(header) #", "= function_catalog.unique_module_name(code, module_dir) storage_dir, module_name = os.path.split(module_path) mod = inline_ext_module(module_name,compiler)", "name of compiler to use when compiling. On windows, it", "argument types here. pass # if we get here, the", "'#if defined(__GNUC__) || defined(__ICC)\\n' \\ ' PyObject* raw_locals __attribute__ ((unused));\\n'", "Python. inline has quite a few options as listed below.", "for 'extra_compile_args'. export_symbols : [string] List of symbols to be", "code in correct location, with the given compiler and verbosity", "all PyObjects are done also. This code got a lot", "the main documentation for examples. auto_downcast : {1,0}, optional This", "= str(msg).strip() if msg[:16] == \"Conversion Error\": pass else: raise", "the command lines for the compilation process and can be", "\\ 'PyObject *py__globals = NULL;\\n' py_objects = ', '.join(self.arg_specs.py_pointers()) if", "assignment much like variables passed are passed into a standard", "understood by distutils. 
(I should add 'gcc' though to this).", "\"\".join(arg_strings) def function_code(self): from .ext_tools import indent decl_code = indent(self.arg_declaration_code(),4)", "uglier when I added local_dict... \"\"\" declare_return = 'py::object return_val;\\n'", "2-tuple, where 'value' is either the string to define it", "code */ \\n\" + \\ cleanup_code + \\ \" if(!(PyObject*)return_val", "{} def inline(code,arg_names=[],local_dict=None, global_dict=None, force=0, compiler='', verbose=0, support_code=None, headers=[], customize=None,", "appended to the front of this list include_dirs : [string]", "in correct location, with the given compiler and verbosity #", "+ \\ \" if(!(PyObject*)return_val && !exception_occurred)\\n\" \\ \" {\\n \\n\"", "local and global dict # as input. from __future__ import", "wasn't found raise ValueError('function with correct signature not found') def", "Variables in the local and global Python scope are also", "this is when the extension is loaded). extra_objects : [string]", "Py_None; \\n\" \\ \" }\\n \\n\" \\ \" return return_val.disown();", "needed by your compiled function. This could be declarations of", "+ py_objects + ';\\n' else: declare_py_objects = '' py_vars =", "catalog.intermediate_dir() code = ndarray_api_version + '\\n' + code module_path =", "and return the function. Make sure # the directory where", "'#else\\n' \\ ' PyObject* raw_locals;\\n' \\ ' PyObject* raw_globals;\\n' \\", "return results def attempt_function_call(code,local_dict,global_dict): # we try 3 levels here", "then persistent catalog. # global function_catalog # 1. try local", "# 3. try persistent catalog module_dir = global_dict.get('__file__',None) function_list =", "from numpy.core.multiarray import _get_ndarray_c_version ndarray_api_version = '/* NDARRAY API VERSION", "code. The list might look like ``[\"<vector>\",\"'my_header'\"]``. 
Note that the", "PyObject* raw_globals __attribute__ ((unused));\\n' \\ '#else\\n' \\ ' PyObject* raw_locals;\\n'", "list of Python variable names that should be transferred from", "slowdown is very noticeable. if verbose > 0: print('<weave: compiling>')", "ndarray_api_version = '/* NDARRAY API VERSION %x */' % (_get_ndarray_c_version(),)", "= [arg.local_dict_code() for arg in self.arg_specs] return \"\".join(arg_strings) def function_code(self):", "data types to C/C++ data types. If you'd like to", "for C/C++ libraries at link time. libraries : [string] List", "levels here -- a local cache first, then the #", "If even one of the arrays has type double or", "libraries at run time (for shared extensions, this is when", "and then persistent catalog. # global function_catalog # 1. try", "changed locals and globals here...*/ \\n' \\ ' }\\n' catch_code", "the type conversions section of the main documentation for examples.", "= 'static PyObject* %s(PyObject*self, PyObject* args)\\n{\\n' return code % self.name", "build_tools compiler = build_tools.choose_compiler(compiler) ext_func.set_compiler(compiler) return ext_func.function_code() def compile_function(code,arg_names,local_dict,global_dict, module_dir,", "(not filenames or paths) to link against. runtime_library_dirs : [string]", "*previous* call # frame -- that is the locals from", "the directory where it lives is in the python path.", "know # how to define that yet. msg = str(msg)", "macros to undefine explicitly. library_dirs : [string] List of directories", "> 0: print('<weave: compiling>') # compile code in correct location,", "apply(function_cache[code],(local_dict,global_dict)) return results except TypeError as msg: msg = str(msg).strip()", "what to name the extension module # that will contain", "specify argument types here. pass # if we get here,", "the extension module # that will contain the function. #", "code to work. 
Its handy for finding the name of", "((unused));\\n' \\ ' PyObject* raw_globals __attribute__ ((unused));\\n' \\ '#else\\n' \\", "from .ext_tools import indent decl_code = indent(self.arg_declaration_code(),4) cleanup_code = indent(self.arg_cleanup_code(),4)", "then the # catalog cache, and then persistent catalog. #", "'if(!PyArg_ParseTuple(args,\"OO:compiled_func\",'\\ '&py__locals,'\\ '&py__globals))\\n'\\ ' return NULL;\\n' return declare_return + declare_py_objects", "only affects functions that have numpy arrays as input variables.", "catalog from . import common_info from numpy.core.multiarray import _get_ndarray_c_version ndarray_api_version", "recognized by the \"build_ext\" command as source for a Python", "new static Python interpreter). Similar interpretation as for 'extra_compile_args'. export_symbols", "valid C++ code declaring extra code that might be needed", "= NULL;\\n\\n' else: init_values = '' parse_tuple = 'if(!PyArg_ParseTuple(args,\"OO:compiled_func\",'\\ '&py__locals,'\\", "local_dict : dict, optional If specified, it is a dictionary", "global scope for the C/C++ code. If `global_dict` is not", "of mutable objects can be changed within the C/C++ code", "= customize # add the extra \"support code\" needed by", "is needed for inline extension functions def function_declaration_code(self): code =", "TypeError as msg: # should specify argument types here. #", "Python scope are also available in the C/C++ code. Values", "arg_local_dict_code(self): \"\"\"Return the code to create the local dict as", "\"Conversion Error\": pass else: raise NameError(msg) # 3. try persistent", "All input keywords are passed through to distutils mod.compile(location=storage_dir,compiler=compiler, verbose=verbose,", "Look in the type conversions section of the main documentation", "be declarations of functions, classes, or structures. 
headers : [str],", "attempt_function_call(code,local_dict,global_dict) return results def attempt_function_call(code,local_dict,global_dict): # we try 3 levels", "code. If local_dict is not specified the local dictionary of", "to C/C++ data types. If you'd like to use a", "when compiling starts, finishes, and how long it took. 2", "+ code module_path = function_catalog.unique_module_name(code, module_dir) storage_dir, module_name = os.path.split(module_path)", "class inline_ext_module(ext_tools.ext_module): def __init__(self,name,compiler=''): ext_tools.ext_module.__init__(self,name,compiler) self._build_information.append(common_info.inline_info()) function_cache = {} def", "classes, or structures. headers : [str], optional A list of", "\\n\" + \\ \" return_val = py::object(); \\n\" \\ \"", "much). type_converters : [type converters], optional These guys are what", "results that need to be returned to Python in the", "= 1; \\n\" \\ \"} \\n\" return_code = \" /*", "time (for shared extensions, this is when the extension is", "Make sure # the directory where it lives is in", "'PyObject ' + py_objects + ';\\n' else: declare_py_objects = ''", "grabs the local variables from the *previous* call # frame", "from . import catalog from . import common_info from numpy.core.multiarray", "local cache first, then the # catalog cache, and then", "to Python. inline has quite a few options as listed", "str(msg).strip() if msg[:16] == \"Conversion Error\": pass else: raise NameError(msg)", "information is printed during the compile phase of inlining code.", ": {0,1,2}, optional Specifies how much information is printed during", "some garbage). 1 informs you when compiling starts, finishes, and", "input variables. Setting this to 1 will cause all floating", "or paths) to link against. runtime_library_dirs : [string] List of", "function_decls = '{\"%s\",(PyCFunction)%s , METH_VARARGS},\\n' % args return function_decls class", "import sys import os from . 
import ext_tools from .", "details. (not sure this'll be used much). type_converters : [type", "for the C/C++ code. If `global_dict` is not specified, the", "of the message, but I don't know # how to", "the Microsoft C++ compiler. If this isn't available, it looks", "finding the name of the .cpp file if you need", "For platforms and compilers where \"command line\" makes sense, this", "an easy way for the user_path_list to come in here.", "msg: # should specify argument types here. # This should", "# slowdown is very noticeable. if verbose > 0: print('<weave:", "all platforms, and not generally necessary for Python extensions, which", "affects functions that have numpy arrays as input variables. Setting", "compiler : str, optional The name of compiler to use", "This code got a lot uglier when I added local_dict...", "the compiler defaults to the Microsoft C++ compiler. If this", "error type, instead of # checking the beginning of the", "are of type float. If even one of the arrays", "silent (except on windows with msvc where it still prints", "cast as float instead of double if all the Numeric", "inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) from . import build_tools compiler = build_tools.choose_compiler(compiler)", "extra files to link with (e.g. object files not implied", "when the extension is loaded). extra_objects : [string] List of", "\"Conversion Error\": pass else: raise NameError(msg) except KeyError: pass #", "Python extensions, which typically export exactly one symbol: \"init\" +", "Unused. Other Parameters ---------------- Relevant :mod:`distutils` keywords. 
These are duplicated", "inline_ext_module(ext_tools.ext_module): def __init__(self,name,compiler=''): ext_tools.ext_module.__init__(self,name,compiler) self._build_information.append(common_info.inline_info()) function_cache = {} def inline(code,arg_names=[],local_dict=None,", "arg in self.arg_specs] return \"\".join(arg_strings) def arg_cleanup_code(self): \"\"\"Return the cleanup", "'value' is either the string to define it to or", "double if all the Numeric arrays are of type float.", "``#include`` statement in the C++ code. customize : base_info.custom_info, optional", "-- a local cache first, then the # catalog cache,", "to the front of this list include_dirs : [string] List", "class for convenience: sources : [string] List of source filenames,", "0: print('<weave: compiling>') # compile code in correct location, with", "\\n\" \\ \" return_val = Py_None; \\n\" \\ \" }\\n", "[string] List of symbols to be exported from a shared", "swig_opts : [string] Any extra options to pass to SWIG", "build the function except ValueError: # compile the library module_dir", "# 3. 
build the function except ValueError: # compile the", "+ \\ cleanup_code + \\ \" if(!(PyObject*)return_val && !exception_occurred)\\n\" \\", "is either the string to define it to or None", "at run time (for shared extensions, this is when the", "+ decl_code + \\ ' /* inline code */ \\n'", "function_catalog.unique_module_name(code, module_dir) storage_dir, module_name = os.path.split(module_path) mod = inline_ext_module(module_name,compiler) #", "and probably only useful if your editing `support_code` a lot.", "= catalog.intermediate_dir() code = ndarray_api_version + '\\n' + code module_path", "# compile the library module_dir = global_dict.get('__file__',None) func = compile_function(code,arg_names,local_dict,", "a local cache first, then the # catalog cache, and", "If local_dict is not specified the local dictionary of the", "Any extra platform- and compiler-specific information to use when linking", "compiler='', verbose=1, support_code=None, headers=[], customize=None, type_converters=None, auto_downcast=1, **kw): # figure", "binary resource files, etc.) extra_compile_args : [string] Any extra platform-", "msg = str(msg).strip() if msg[:16] == \"Conversion Error\": pass else:", "= ' = '.join(self.arg_specs.py_variables()) if py_vars: init_values = py_vars +", "auto_downcast and # type factories setting ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast,", "create the local dict as a string.\"\"\" arg_strings = [arg.local_dict_code()", "has quite a few options as listed below. Also, the", "\"\"\"Return the cleanup code as a string.\"\"\" arg_strings = [arg.cleanup_code()", "raise NameError(msg) # 3. try persistent catalog module_dir = global_dict.get('__file__',None)", "be useful if your having problems getting code to work.", "we try 3 levels here -- a local cache first,", "the code. The list might look like ``[\"<vector>\",\"'my_header'\"]``. Note that", "then set the module customization. 
if customize: mod.customize = customize", "should be used as the global scope for the C/C++", "relative to the distribution root (where the setup script lives),", "files, or whatever else is recognized by the \"build_ext\" command", "**kw) function_catalog.add_function(code,func,module_dir) results = attempt_function_call(code,local_dict,global_dict) return results def attempt_function_call(code,local_dict,global_dict): #", "one symbol: \"init\" + extension_name. swig_opts : [string] Any extra", "\\ ' PyObject* raw_globals __attribute__ ((unused));\\n' \\ '#else\\n' \\ '", "extra information needed for compiling. Parameters ---------- code : string", "Values are passed to the C/C++ code by assignment much", "py_objects + ';\\n' else: declare_py_objects = '' py_vars = '", "None: global_dict = call_frame.f_globals if force: module_dir = global_dict.get('__file__',None) func", "raw_globals = py_to_raw_dict(py__globals,\"_globals\");\\n' \\ ' /* argument conversion code */", "VERSION %x */' % (_get_ndarray_c_version(),) # not an easy way", "type_converters=None, auto_downcast=1, newarr_converter=0, **kw): \"\"\" Inline C/C++ code within Python", "code on the fly. Variables in the local and global", "get here, the function wasn't found raise ValueError('function with correct", "a different set of type conversions than the default, specify", "headers needed by the function to the module. for header", "[str], optional A list of strings specifying header files to", "mingw32 (the gcc compiler). On Unix, it'll probably use the", "to come in here. # the PYTHONCOMPILED environment variable offers", "arg_cleanup_code(self): \"\"\"Return the cleanup code as a string.\"\"\" arg_strings =", "inline_ext_module(module_name,compiler) # create the function. This relies on the auto_downcast", "is loaded). extra_objects : [string] List of extra files to", "for inline extension functions def function_declaration_code(self): code = 'static PyObject*", ": Describes additional parameters. 
\"\"\" # this grabs the local", "arg_strings = [arg.local_dict_code() for arg in self.arg_specs] return \"\".join(arg_strings) def", "function_code + \\ ' /*I would like to fill in", "value : string|None)] List of macros to define; each macro", "remain after the C code exits and returns to Python.", "init_values = '' parse_tuple = 'if(!PyArg_ParseTuple(args,\"OO:compiled_func\",'\\ '&py__locals,'\\ '&py__globals))\\n'\\ ' return", "a string.\"\"\" arg_strings = [arg.declaration_code(inline=1) for arg in self.arg_specs] return", "of values that should be used as the local scope", "block for PyArg_ParseTuple. Variable declarations for all PyObjects are done", "Any extra platform- and compiler-specific information to use when compiling", "try catalog cache. function_list = function_catalog.get_functions_fast(code) for func in function_list:", "(where the setup script lives), in Unix form (slash-separated) for", "defined using a 2-tuple, where 'value' is either the string", "location, with the given compiler and verbosity # setting. All", "def __init__(self,name,compiler=''): ext_tools.ext_module.__init__(self,name,compiler) self._build_information.append(common_info.inline_info()) function_cache = {} def inline(code,arg_names=[],local_dict=None, global_dict=None,", "of inlining code. 0 is silent (except on windows with", "try: results = apply(func,(local_dict,global_dict)) function_catalog.fast_cache(code,func) function_cache[code] = func return results", "module and return the function. Make sure # the directory", "is in the python path. try: sys.path.insert(0,storage_dir) exec('import ' +", "specify argument types here. # This should really have its", "garbage). 1 informs you when compiling starts, finishes, and how", "inlining code. 0 is silent (except on windows with msvc", "in Unix form (slash-separated) for portability. 
Source files may be", "headers=headers, customize=customize, type_converters=type_converters, auto_downcast=auto_downcast, **kw) function_catalog.add_function(code,func,module_dir) results = attempt_function_call(code,local_dict,global_dict) return", "a lot uglier when I added local_dict... \"\"\" declare_return =", "`return_val`. arg_names : [str], optional A list of Python variable", "ValueError: # compile the library module_dir = global_dict.get('__file__',None) func =", "code and the changes remain after the C code exits", "the `return_val`. arg_names : [str], optional A list of Python", "if verbose > 0: print('<weave: compiling>') # compile code in", "List of symbols to be exported from a shared extension.", "mutable objects can be changed within the C/C++ code and", "SWIG (.i), platform-specific resource files, or whatever else is recognized", "C/C++ code. Values are passed to the C/C++ code by", "store and what to name the extension module # that", "could be anything. extra_link_args : [string] Any extra platform- and", "local_dict... \"\"\" declare_return = 'py::object return_val;\\n' \\ 'int exception_occurred =", "might look like ``[\"<vector>\",\"'my_header'\"]``. Note that the header strings need", "windows, it understands 'msvc' and 'gcc' as well as all", "without a particular value (equivalent of \"#define FOO\" in source", "cache try: results = apply(function_cache[code],(local_dict,global_dict)) return results except TypeError as", "it lives is in the python path. 
try: sys.path.insert(0,storage_dir) exec('import", "\\ \"} \\n\" return_code = \" /* cleanup code */", "mod.compile(location=storage_dir,compiler=compiler, verbose=verbose, **kw) # import the module and return the", "customize=customize, type_converters=type_converters, auto_downcast=auto_downcast, **kw) function_catalog.add_function(code,func,module_dir) results = attempt_function_call(code,local_dict,global_dict) return results", "= inline_ext_module(module_name,compiler) # create the function. This relies on the", "values that should be used as the local scope for", "phase of inlining code. 0 is silent (except on windows", "much like variables passed are passed into a standard Python", "'&py__locals,'\\ '&py__globals))\\n'\\ ' return NULL;\\n' return declare_return + declare_py_objects +", "\\ \" return_val = Py_None; \\n\" \\ \" }\\n \\n\"", "\\ return_code return all_code def python_function_definition_code(self): args = (self.name, self.name)", "function_catalog.fast_cache(code,func) function_cache[code] = func return results except: # should specify", "msg = str(msg) if msg[:16] == \"Conversion Error\": pass else:", "return \"\".join(arg_strings) def arg_cleanup_code(self): \"\"\"Return the cleanup code as a", "PyArg_ParseTuple. Variable declarations for all PyObjects are done also. This", "compiled functions to take a local and global dict #", "when I added local_dict... \"\"\" declare_return = 'py::object return_val;\\n' \\", "= \"catch(...) \\n\" \\ \"{ \\n\" + \\ \" return_val", "((unused));\\n' \\ '#else\\n' \\ ' PyObject* raw_locals;\\n' \\ ' PyObject*", "ext_tools.ext_module.__init__(self,name,compiler) self._build_information.append(common_info.inline_info()) function_cache = {} def inline(code,arg_names=[],local_dict=None, global_dict=None, force=0, compiler='',", "source for a Python extension. .. 
note:: The `module_path` file", "as a string.\"\"\" arg_strings = [arg.declaration_code(inline=1) for arg in self.arg_specs]", "global function_catalog # 1. try local cache try: results =", "# catalog cache, and then persistent catalog. # global function_catalog", "sys._getframe().f_back if local_dict is None: local_dict = call_frame.f_locals if global_dict", "return_code = \" /* cleanup code */ \\n\" + \\", "scope for the C/C++ code. If `global_dict` is not specified,", "\" return return_val.disown(); \\n\" \\ \"} \\n\" all_code = self.function_declaration_code()", "to define; each macro is defined using a 2-tuple, where", "optional If specified, it is a dictionary of values that", "function_catalog # 1. try local cache try: results = apply(function_cache[code],(local_dict,global_dict))", "out where to store and what to name the extension", "specified, it is a dictionary of values that should be", "of files that the extension depends on. language : string", "of double if all the Numeric arrays are of type", "the function that called # inline. global function_catalog call_frame =", "\"} \\n\" return_code = \" /* cleanup code */ \\n\"", "is very noticeable. if verbose > 0: print('<weave: compiling>') #", "the name of the .cpp file if you need to", "add 'gcc' though to this). On windows, the compiler defaults", "must be explicitly specified, binary resource files, etc.) extra_compile_args :", "# this grabs the local variables from the *previous* call", "module. for header in headers: mod.customize.add_header(header) # it's nice to", "need to be returned to Python in the `return_val`. arg_names", "if all the Numeric arrays are of type float. If", "specialization is needed for inline extension functions def function_declaration_code(self): code", "results except TypeError as msg: msg = str(msg).strip() if msg[:16]", "' /*I would like to fill in changed locals and", "called return_val. 
Also, the contents of mutable objects can be", "\\n\" \\ \" return return_val.disown(); \\n\" \\ \"} \\n\" all_code", "information to use when compiling the source files in 'sources'.", "duplicated from <NAME>'s :class:`distutils.extension.Extension` class for convenience: sources : [string]", "filenames or paths) to link against. runtime_library_dirs : [string] List", "process and can be useful if your having problems getting", "what convert Python data types to C/C++ data types. If", "declare_py_objects = '' py_vars = ' = '.join(self.arg_specs.py_variables()) if py_vars:", "set the module customization. if customize: mod.customize = customize #", "re-write compiled functions to take a local and global dict", "getting code to work. Its handy for finding the name", "lives), in Unix form (slash-separated) for portability. Source files may", "list of command-line arguments, but for other platforms it could", "them here. Look in the type conversions section of the", "used as the global scope for the C/C++ code. If", "# not an easy way for the user_path_list to come", "*/ \\n\" + \\ cleanup_code + \\ \" if(!(PyObject*)return_val &&", "else: raise NameError(msg) except KeyError: pass # 2. try catalog", "platform- and compiler-specific information to use when compiling the source", "args = (self.name, self.name) function_decls = '{\"%s\",(PyCFunction)%s , METH_VARARGS},\\n' %", "'{\"%s\",(PyCFunction)%s , METH_VARARGS},\\n' % args return function_decls class inline_ext_module(ext_tools.ext_module): def", "as listed below. 
Also, the keyword arguments for distutils extension", "List of directories to search for C/C++ libraries at run", "init_values = py_vars + ' = NULL;\\n\\n' else: init_values =", "+ declare_py_objects + \\ init_values + parse_tuple def arg_declaration_code(self): \"\"\"Return", "pass else: raise TypeError(msg) except NameError as msg: msg =", "auto_downcast : {1,0}, optional This only affects functions that have", "to search for C/C++ header files (in Unix form for", "Also -------- distutils.extension.Extension : Describes additional parameters. \"\"\" # this", "pass # 2. try catalog cache. function_list = function_catalog.get_functions_fast(code) for", "same compiler that was used when compiling Python. Cygwin's behavior", "Create code block for PyArg_ParseTuple. Variable declarations for all PyObjects", "starts, finishes, and how long it took. 2 prints out", ": string|None)] List of macros to define; each macro is", "[string] Any extra options to pass to SWIG if a", "to work. Its handy for finding the name of the", "function_catalog = catalog.catalog() class inline_ext_function(ext_tools.ext_function): # Some specialization is needed", "the declaration code as a string.\"\"\" arg_strings = [arg.declaration_code(inline=1) for", "of Python variable names that should be transferred from Python", "the C code exits and returns to Python. inline has", "try 3 levels here -- a local cache first, then", "global Python scope are also available in the C/C++ code.", "storage_dir, module_name = os.path.split(module_path) mod = inline_ext_module(module_name,compiler) # create the", "persistent catalog module_dir = global_dict.get('__file__',None) function_list = function_catalog.get_functions(code,module_dir) for func", "not provided. See Also -------- distutils.extension.Extension : Describes additional parameters.", "try function catalog try: results = attempt_function_call(code,local_dict,global_dict) # 3. 
build", "in function_list: try: results = apply(func,(local_dict,global_dict)) function_catalog.fast_cache(code,func) function_cache[code] = func", "need to be in a form than can be pasted", "\"c++\", \"objc\"). Will be detected from the source extensions if", "be transferred from Python into the C/C++ code. It defaults", "only useful for debugging, and probably only useful if your", "It should not specify a return statement. Instead it should", "argument conversion code */ \\n' \\ + decl_code + \\", "\\ indent(self.parse_tuple_code(),4) + \\ try_code + \\ indent(catch_code,4) + \\", "changes remain after the C code exits and returns to", "import catalog from . import common_info from numpy.core.multiarray import _get_ndarray_c_version", "pass # 2. try function catalog try: results = attempt_function_call(code,local_dict,global_dict)", "understand the values understood by distutils. (I should add 'gcc'", "__future__ import absolute_import, print_function import sys import os from .", "type_converters=None, auto_downcast=1, **kw): # figure out where to store and", "relies on the auto_downcast and # type factories setting ext_func", "# storage_dir = catalog.intermediate_dir() code = ndarray_api_version + '\\n' +", "type_converters=type_converters, auto_downcast=auto_downcast, **kw) function_catalog.add_function(code,func,module_dir) results = attempt_function_call(code,local_dict,global_dict) else: # 1.", "function. # storage_dir = catalog.intermediate_dir() code = ndarray_api_version + '\\n'", "offers the most hope. 
function_catalog = catalog.catalog() class inline_ext_function(ext_tools.ext_function): #", "0;\\n' \\ 'PyObject *py__locals = NULL;\\n' \\ 'PyObject *py__globals =", "distutils mod.compile(location=storage_dir,compiler=compiler, verbose=verbose, **kw) # import the module and return", "not specified the local dictionary of the calling function is", "extra \"support code\" needed by the function to the module.", "= py_to_raw_dict(py__globals,\"_globals\");\\n' \\ ' /* argument conversion code */ \\n'", "understands 'msvc' and 'gcc' as well as all the compiler", ".cpp file if you need to examine it. verbose has", "---------- code : string A string of valid C++ code.", "a 2-tuple, where 'value' is either the string to define", "function_list: try: results = apply(func,(local_dict,global_dict)) function_catalog.fast_cache(code,func) function_cache[code] = func return", "extra headers needed by the function to the module. for", "extra code that might be needed by your compiled function.", "'' py_vars = ' = '.join(self.arg_specs.py_variables()) if py_vars: init_values =", "passed into a standard Python function. Values are returned from", "compiler-specific information to use when linking object files together to", "you need to examine it. verbose has no effect if", "available in the C/C++ code. Values are passed to the", "global function_catalog call_frame = sys._getframe().f_back if local_dict is None: local_dict", "TypeError as msg: msg = str(msg).strip() if msg[:16] == \"Conversion", "\" exception_occurred = 1; \\n\" \\ \"} \\n\" return_code =", "cleanup_code + \\ \" if(!(PyObject*)return_val && !exception_occurred)\\n\" \\ \" {\\n", "the most hope. function_catalog = catalog.catalog() class inline_ext_function(ext_tools.ext_function): # Some", "into the C/C++ code. It defaults to an empty string.", "extension depends on. language : string Extension language (i.e. \"c\",", "if the compilation isn't necessary. 
support_code : str, optional A", "+ ' = NULL;\\n\\n' else: init_values = '' parse_tuple =", "raise TypeError(msg) except NameError as msg: msg = str(msg).strip() if", "useful for debugging, and probably only useful if your editing", "noticeable. if verbose > 0: print('<weave: compiling>') # compile code", "for func in function_list: try: results = apply(func,(local_dict,global_dict)) function_catalog.fast_cache(code,func) function_cache[code]", "that is the locals from the function that called #", "return results except TypeError as msg: msg = str(msg).strip() if", "shared extensions, this is when the extension is loaded). extra_objects", "import _get_ndarray_c_version ndarray_api_version = '/* NDARRAY API VERSION %x */'", "= indent(self.arg_cleanup_code(),4) function_code = indent(self.code_block,4) # local_dict_code = indent(self.arg_local_dict_code(),4) try_code", ": [str], optional A list of strings specifying header files", "= os.path.split(module_path) mod = inline_ext_module(module_name,compiler) # create the function. This", "raw_globals;\\n' \\ '#endif\\n' \\ ' raw_locals = py_to_raw_dict(py__locals,\"_locals\");\\n' \\ '", "Error\": pass else: raise TypeError(msg) except NameError as msg: msg", "declare_return + declare_py_objects + \\ init_values + parse_tuple def arg_declaration_code(self):", "exported from a shared extension. Not used on all platforms,", ": [string] List of source filenames, relative to the distribution", "explicitly. library_dirs : [string] List of directories to search for", "is really only useful for debugging, and probably only useful", "the front of this list include_dirs : [string] List of", "message, but I don't know # how to define that", "A list of Python variable names that should be transferred", "\\ ' try \\n' \\ ' { \\n' \\ '#if", "\"\".join(arg_strings) def arg_local_dict_code(self): \"\"\"Return the code to create the local", "the Numeric arrays are of type float. If even one", "a lot. 
compiler : str, optional The name of compiler", "to be returned to Python in the `return_val`. arg_names :", "mod.customize = customize # add the extra \"support code\" needed", "in the local and global Python scope are also available", "that might be needed by your compiled function. This could", "for more details. (not sure this'll be used much). type_converters", "code = 'template<class T>\\n' \\ 'static PyObject* %s(PyObject*self, PyObject* args)\\n{\\n'", "function except ValueError: # compile the library module_dir = global_dict.get('__file__',None)", "to the module. if support_code: mod.customize.add_support_code(support_code) # add the extra", "here...*/ \\n' \\ ' }\\n' catch_code = \"catch(...) \\n\" \\", "and # type factories setting ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters)", "= {} def inline(code,arg_names=[],local_dict=None, global_dict=None, force=0, compiler='', verbose=0, support_code=None, headers=[],", "transferred from Python into the C/C++ code. It defaults to", "Also, the keyword arguments for distutils extension modules are accepted", "are accepted to specify extra information needed for compiling. Parameters", "return statement. Instead it should assign results that need to", "platforms and compilers where \"command line\" makes sense, this is", "called. This is really only useful for debugging, and probably", "raw_globals __attribute__ ((unused));\\n' \\ '#else\\n' \\ ' PyObject* raw_locals;\\n' \\", "files that the extension depends on. language : string Extension", "typically export exactly one symbol: \"init\" + extension_name. swig_opts :", "line). undef_macros : [string] List of macros to undefine explicitly.", "the # catalog cache, and then persistent catalog. # global", "environment variable offers the most hope. function_catalog = catalog.catalog() class", "and executes C/C++ code on the fly. 
Variables in the", "These guys are what convert Python data types to C/C++", "library_dirs : [string] List of directories to search for C/C++", "examples. auto_downcast : {1,0}, optional This only affects functions that", "that need to be returned to Python in the `return_val`.", "is not specified, the global dictionary of the calling function", "Parameters ---------- code : string A string of valid C++", "Python extension. .. note:: The `module_path` file is always appended", "by the function to the module. if support_code: mod.customize.add_support_code(support_code) #", "decl_code = indent(self.arg_declaration_code(),4) cleanup_code = indent(self.arg_cleanup_code(),4) function_code = indent(self.code_block,4) #", "func return results except: # should specify argument types here.", "`global_dict` is not specified, the global dictionary of the calling", "of the arrays has type double or double complex, all", "it without a particular value (equivalent of \"#define FOO\" in", ": [string] List of files that the extension depends on.", "an empty string. local_dict : dict, optional If specified, it", "don't know # how to define that yet. msg =", "done also. This code got a lot uglier when I", "common_info from numpy.core.multiarray import _get_ndarray_c_version ndarray_api_version = '/* NDARRAY API", "scripts. ``inline()`` compiles and executes C/C++ code on the fly.", "arguments for distutils extension modules are accepted to specify extra", "sure # the directory where it lives is in the", "it looks for mingw32 (the gcc compiler). On Unix, it'll", "optional The name of compiler to use when compiling. On", "name of the .cpp file if you need to examine", "statement in the C++ code. 
customize : base_info.custom_info, optional An", "try: results = apply(function_cache[code],(local_dict,global_dict)) return results except TypeError as msg:", "all floating point values to be cast as float instead", "# figure out where to store and what to name", "files may be C, C++, SWIG (.i), platform-specific resource files,", "+ \\ ' /*I would like to fill in changed", "by the function to the module. for header in headers:", "'py::object return_val;\\n' \\ 'int exception_occurred = 0;\\n' \\ 'PyObject *py__locals", "variables passed are passed into a standard Python function. Values", ".i extension. depends : [string] List of files that the", "used on all platforms, and not generally necessary for Python", "\\ ' raw_globals = py_to_raw_dict(py__globals,\"_globals\");\\n' \\ ' /* argument conversion", "no effect if the compilation isn't necessary. support_code : str,", "and compiler-specific information to use when linking object files together", "force: module_dir = global_dict.get('__file__',None) func = compile_function(code,arg_names,local_dict, global_dict,module_dir, compiler=compiler, verbose=verbose,", "+ \\ ' /* inline code */ \\n' \\ +", "local scope for the C/C++ code. If local_dict is not", "assign results that need to be returned to Python in", "List of directories to search for C/C++ header files (in", "for mingw32 (the gcc compiler). On Unix, it'll probably use", "on windows with msvc where it still prints some garbage).", "Its handy for finding the name of the .cpp file", "If this isn't available, it looks for mingw32 (the gcc", "conversion code */ \\n' \\ + decl_code + \\ '", "and verbosity # setting. All input keywords are passed through", "'PyObject *py__globals = NULL;\\n' py_objects = ', '.join(self.arg_specs.py_pointers()) if py_objects:", "mod.customize.add_header(header) # it's nice to let the users know when", "import ext_tools from . import catalog from . 
import common_info", "not specified, the global dictionary of the calling function is", "return code % self.name def parse_tuple_code(self): \"\"\" Create code block", "The list might look like ``[\"<vector>\",\"'my_header'\"]``. Note that the header", "found raise ValueError('function with correct signature not found') def inline_function_code(code,arg_names,local_dict=None,", "prints out the command lines for the compilation process and", "try: sys.path.insert(0,storage_dir) exec('import ' + module_name) func = eval(module_name+'.compiled_func') finally:", "If 1, the C++ code is compiled every time inline", "[string] Any extra platform- and compiler-specific information to use when", "the extra headers needed by the function to the module.", "extra platform- and compiler-specific information to use when compiling the", "code. It should not specify a return statement. Instead it", "A list of strings specifying header files to use when", "and global Python scope are also available in the C/C++", "directories to search for C/C++ libraries at link time. libraries", "= function_catalog.get_functions(code,module_dir) for func in function_list: try: results = apply(func,(local_dict,global_dict))", "function_cache[code] = func return results except TypeError as msg: #", "\\n\" + \\ cleanup_code + \\ \" if(!(PyObject*)return_val && !exception_occurred)\\n\"", "= call_frame.f_locals if global_dict is None: global_dict = call_frame.f_globals if", "2 prints out the command lines for the compilation process", "through to distutils mod.compile(location=storage_dir,compiler=compiler, verbose=verbose, **kw) # import the module", "%s(PyObject*self, PyObject* args)\\n{\\n' return code % self.name def template_declaration_code(self): code", "files to use when compiling the code. The list might", "of a ``#include`` statement in the C++ code. customize :", "standard Python function. 
Values are returned from the C/C++ code", "function_decls class inline_ext_module(ext_tools.ext_module): def __init__(self,name,compiler=''): ext_tools.ext_module.__init__(self,name,compiler) self._build_information.append(common_info.inline_info()) function_cache = {}", ": string A string of valid C++ code. It should", "\" return_val = py::object(); \\n\" \\ \" exception_occurred = 1;", "\\ ' { \\n' \\ '#if defined(__GNUC__) || defined(__ICC)\\n' \\", "optional This only affects functions that have numpy arrays as", ":mod:`distutils` keywords. These are duplicated from <NAME>'s :class:`distutils.extension.Extension` class for", "portability. Source files may be C, C++, SWIG (.i), platform-specific", "of source filenames, relative to the distribution root (where the", "command-line arguments, but for other platforms it could be anything.", "of compiler to use when compiling. On windows, it understands", "support_code : str, optional A string of valid C++ code", "string. local_dict : dict, optional If specified, it is a", ": string Extension language (i.e. \"c\", \"c++\", \"objc\"). Will be", "be returned to Python in the `return_val`. arg_names : [str],", "to examine it. verbose has no effect if the compilation", "&& !exception_occurred)\\n\" \\ \" {\\n \\n\" \\ \" return_val =", ": dict, optional If specified, it is a dictionary of", "'&py__globals))\\n'\\ ' return NULL;\\n' return declare_return + declare_py_objects + \\", "its own error type, instead of # checking the beginning", "NULL;\\n' return declare_return + declare_py_objects + \\ init_values + parse_tuple", "should be transferred from Python into the C/C++ code. 
It", "the library module_dir = global_dict.get('__file__',None) func = compile_function(code,arg_names,local_dict, global_dict,module_dir, compiler=compiler,", "py_to_raw_dict(py__locals,\"_locals\");\\n' \\ ' raw_globals = py_to_raw_dict(py__globals,\"_globals\");\\n' \\ ' /* argument", "first, then the # catalog cache, and then persistent catalog.", "NULL;\\n' py_objects = ', '.join(self.arg_specs.py_pointers()) if py_objects: declare_py_objects = 'PyObject", "scope for the C/C++ code. If local_dict is not specified", "contents of mutable objects can be changed within the C/C++", "\"#define FOO\" in source or -DFOO on Unix C compiler", "the function. Make sure # the directory where it lives", "you when compiling starts, finishes, and how long it took.", "used much). type_converters : [type converters], optional These guys are", "header files (in Unix form for portability). define_macros : [(name", "I added local_dict... \"\"\" declare_return = 'py::object return_val;\\n' \\ 'int", "define it without a particular value (equivalent of \"#define FOO\"", "a string.\"\"\" arg_strings = [arg.cleanup_code() for arg in self.arg_specs] return", "else is recognized by the \"build_ext\" command as source for", "inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) mod.add_function(ext_func) # if customize (a custom_info object),", "probably only useful if your editing `support_code` a lot. compiler", "is not specified the local dictionary of the calling function", "for the C/C++ code. If local_dict is not specified the", "= (self.name, self.name) function_decls = '{\"%s\",(PyCFunction)%s , METH_VARARGS},\\n' % args", "\"objc\"). Will be detected from the source extensions if not", "pass else: raise NameError(msg) # 3. try persistent catalog module_dir", "Microsoft C++ compiler. 
If this isn't available, it looks for", "front of this list include_dirs : [string] List of directories", "' { \\n' \\ '#if defined(__GNUC__) || defined(__ICC)\\n' \\ '", ": [(name : string, value : string|None)] List of macros", "'msvc' and 'gcc' as well as all the compiler names", "platform- and compiler-specific information to use when linking object files", "dict, optional If specified, it is a dictionary of values", "might be needed by your compiled function. This could be", "each macro is defined using a 2-tuple, where 'value' is", "3. try persistent catalog module_dir = global_dict.get('__file__',None) function_list = function_catalog.get_functions(code,module_dir)", "search for C/C++ libraries at run time (for shared extensions,", "== \"Conversion Error\": pass else: raise NameError(msg) # 3. try", "code */ \\n' \\ + decl_code + \\ ' /*", "module customization. if customize: mod.customize = customize # add the", "can be pasted at the end of a ``#include`` statement", ": [string] List of directories to search for C/C++ header", "' /* inline code */ \\n' \\ + function_code +", "1 will cause all floating point values to be cast", "exception_occurred = 0;\\n' \\ 'PyObject *py__locals = NULL;\\n' \\ 'PyObject", "{\\n \\n\" \\ \" return_val = Py_None; \\n\" \\ \"", "= '.join(self.arg_specs.py_variables()) if py_vars: init_values = py_vars + ' =", "attempt_function_call(code,local_dict,global_dict) else: # 1. try local cache try: results =", "the python path. try: sys.path.insert(0,storage_dir) exec('import ' + module_name) func", "checking the beginning of the message, but I don't know", "headers=[], customize=None, type_converters=None, auto_downcast=1, newarr_converter=0, **kw): \"\"\" Inline C/C++ code", "if support_code: mod.customize.add_support_code(support_code) # add the extra headers needed by", "= 'PyObject ' + py_objects + ';\\n' else: declare_py_objects =", "input. 
from __future__ import absolute_import, print_function import sys import os", "with the given compiler and verbosity # setting. All input", "= py::object(); \\n\" \\ \" exception_occurred = 1; \\n\" \\", "compiling. On windows, it understands 'msvc' and 'gcc' as well", "long it took. 2 prints out the command lines for", "\"catch(...) \\n\" \\ \"{ \\n\" + \\ \" return_val =", "work. Its handy for finding the name of the .cpp", "windows with msvc where it still prints some garbage). 1", "types. If you'd like to use a different set of", "the PYTHONCOMPILED environment variable offers the most hope. function_catalog =", "'\\n' + code module_path = function_catalog.unique_module_name(code, module_dir) storage_dir, module_name =", "function_catalog.add_function(code,func,module_dir) results = attempt_function_call(code,local_dict,global_dict) return results def attempt_function_call(code,local_dict,global_dict): # we", "is recognized by the \"build_ext\" command as source for a", "if msg[:16] == \"Conversion Error\": pass else: raise TypeError(msg) except", "to 1 will cause all floating point values to be", "is None: global_dict = call_frame.f_globals if force: module_dir = global_dict.get('__file__',None)", "function. Make sure # the directory where it lives is", "T>\\n' \\ 'static PyObject* %s(PyObject*self, PyObject* args)\\n{\\n' return code %", "the given compiler and verbosity # setting. All input keywords", "\\ ' /* inline code */ \\n' \\ + function_code", "called # inline. global function_catalog call_frame = sys._getframe().f_back if local_dict", "problems getting code to work. Its handy for finding the", "having problems getting code to work. 
Its handy for finding", "!exception_occurred)\\n\" \\ \" {\\n \\n\" \\ \" return_val = Py_None;", "if msg[:16] == \"Conversion Error\": pass else: raise NameError(msg) #", "this to 1 will cause all floating point values to", "of directories to search for C/C++ libraries at link time.", "= sys._getframe().f_back if local_dict is None: local_dict = call_frame.f_locals if", "Variable declarations for all PyObjects are done also. This code", "is silent (except on windows with msvc where it still", "Note that the header strings need to be in a", "as a string.\"\"\" arg_strings = [arg.cleanup_code() for arg in self.arg_specs]", "that was used when compiling Python. Cygwin's behavior should be", "Some specialization is needed for inline extension functions def function_declaration_code(self):", ": base_info.custom_info, optional An alternative way to specify `support_code`, `headers`,", "' + py_objects + ';\\n' else: declare_py_objects = '' py_vars", "resource files, or whatever else is recognized by the \"build_ext\"", "compiling>') # compile code in correct location, with the given", "\" {\\n \\n\" \\ \" return_val = Py_None; \\n\" \\", "def template_declaration_code(self): code = 'template<class T>\\n' \\ 'static PyObject* %s(PyObject*self,", "that should be used as the local scope for the", "raise ValueError('function with correct signature not found') def inline_function_code(code,arg_names,local_dict=None, global_dict=None,auto_downcast=1,", "= inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) mod.add_function(ext_func) # if customize (a custom_info", "Relevant :mod:`distutils` keywords. These are duplicated from <NAME>'s :class:`distutils.extension.Extension` class", "a few options as listed below. Also, the keyword arguments", "when compiling. 
On windows, it understands 'msvc' and 'gcc' as", "define_macros : [(name : string, value : string|None)] List of", "to be in a form than can be pasted at", "customize=None, type_converters=None, auto_downcast=1, newarr_converter=0, **kw): \"\"\" Inline C/C++ code within", "within Python scripts. ``inline()`` compiles and executes C/C++ code on", "verbose=verbose, support_code=support_code, headers=headers, customize=customize, type_converters=type_converters, auto_downcast=auto_downcast, **kw) function_catalog.add_function(code,func,module_dir) results =", "call_frame = sys._getframe().f_back if local_dict is None: local_dict = call_frame.f_locals", "string.\"\"\" arg_strings = [arg.local_dict_code() for arg in self.arg_specs] return \"\".join(arg_strings)", "exits and returns to Python. inline has quite a few", "was used when compiling Python. Cygwin's behavior should be similar.", "value (equivalent of \"#define FOO\" in source or -DFOO on", "= ndarray_api_version + '\\n' + code module_path = function_catalog.unique_module_name(code, module_dir)", "not found') def inline_function_code(code,arg_names,local_dict=None, global_dict=None,auto_downcast=1, type_converters=None,compiler=''): call_frame = sys._getframe().f_back if", "to undefine explicitly. library_dirs : [string] List of directories to", "= call_frame.f_locals if global_dict is None: global_dict = call_frame.f_globals ext_func", "for all PyObjects are done also. This code got a", "guys are what convert Python data types to C/C++ data", "link against. runtime_library_dirs : [string] List of directories to search", "string of valid C++ code declaring extra code that might", "py_objects: declare_py_objects = 'PyObject ' + py_objects + ';\\n' else:", "macros to define; each macro is defined using a 2-tuple,", "given compiler and verbosity # setting. All input keywords are", "'template<class T>\\n' \\ 'static PyObject* %s(PyObject*self, PyObject* args)\\n{\\n' return code", "added local_dict... 
\"\"\" declare_return = 'py::object return_val;\\n' \\ 'int exception_occurred", "into a standard Python function. Values are returned from the", "C/C++ libraries at link time. libraries : [string] List of", "the C/C++ code. Values are passed to the C/C++ code", "msg[:16] == \"Conversion Error\": pass else: raise TypeError(msg) except NameError", "here -- a local cache first, then the # catalog", "= ', '.join(self.arg_specs.py_pointers()) if py_objects: declare_py_objects = 'PyObject ' +", "results def attempt_function_call(code,local_dict,global_dict): # we try 3 levels here --", ": [string] List of directories to search for C/C++ libraries", "If `global_dict` is not specified, the global dictionary of the", "of this list include_dirs : [string] List of directories to", "values to be cast as float instead of double if", "cache first, then the # catalog cache, and then persistent", "function to the module. if support_code: mod.customize.add_support_code(support_code) # add the", "options as listed below. Also, the keyword arguments for distutils", "really only useful for debugging, and probably only useful if", "The `module_path` file is always appended to the front of", "documentation for examples. auto_downcast : {1,0}, optional This only affects", "This only affects functions that have numpy arrays as input", "\" /* cleanup code */ \\n\" + \\ cleanup_code +", "all_code def python_function_definition_code(self): args = (self.name, self.name) function_decls = '{\"%s\",(PyCFunction)%s", "results = apply(func,(local_dict,global_dict)) function_catalog.fast_cache(code,func) function_cache[code] = func return results except:", "self.arg_specs] return \"\".join(arg_strings) def arg_local_dict_code(self): \"\"\"Return the code to create", "as the local scope for the C/C++ code. 
If local_dict", "type_converters=type_converters) mod.add_function(ext_func) # if customize (a custom_info object), then set", "\\ + function_code + \\ ' /*I would like to", "function_declaration_code(self): code = 'static PyObject* %s(PyObject*self, PyObject* args)\\n{\\n' return code", "arrays are of type float. If even one of the", "source filenames, relative to the distribution root (where the setup", "is always appended to the front of this list include_dirs", "symbols to be exported from a shared extension. Not used", "the local and global Python scope are also available in", "# checking the beginning of the message, but I don't", "argument types here. # This should really have its own", "all the Numeric arrays are of type float. If even", "(e.g. object files not implied by 'sources', static libraries that", "support_code=support_code, headers=headers, customize=customize, type_converters=type_converters, auto_downcast=auto_downcast, **kw) function_catalog.add_function(code,func,module_dir) results = attempt_function_call(code,local_dict,global_dict)", "/* cleanup code */ \\n\" + \\ cleanup_code + \\", "def arg_local_dict_code(self): \"\"\"Return the code to create the local dict", "type, instead of # checking the beginning of the message,", "function_list = function_catalog.get_functions_fast(code) for func in function_list: try: results =", "are done also. This code got a lot uglier when", "most hope. function_catalog = catalog.catalog() class inline_ext_function(ext_tools.ext_function): # Some specialization", "compiles and executes C/C++ code on the fly. Variables in", "catalog.catalog() class inline_ext_function(ext_tools.ext_function): # Some specialization is needed for inline", "has no effect if the compilation isn't necessary. support_code :", "C, C++, SWIG (.i), platform-specific resource files, or whatever else", "catalog try: results = attempt_function_call(code,local_dict,global_dict) # 3. 
build the function", "the C++ code is compiled every time inline is called.", "also available in the C/C++ code. Values are passed to", "'sources'. For platforms and compilers where \"command line\" makes sense,", "type conversions than the default, specify them here. Look in", "use when compiling the code. The list might look like", "only understand the values understood by distutils. (I should add", "compiler='', verbose=0, support_code=None, headers=[], customize=None, type_converters=None, auto_downcast=1, newarr_converter=0, **kw): \"\"\"", "optional If 1, the C++ code is compiled every time", "or whatever else is recognized by the \"build_ext\" command as", "the module. for header in headers: mod.customize.add_header(header) # it's nice", "not specify a return statement. Instead it should assign results", "defaults to an empty string. local_dict : dict, optional If", "float. If even one of the arrays has type double", "the message, but I don't know # how to define", "code as a string.\"\"\" arg_strings = [arg.declaration_code(inline=1) for arg in", "string of valid C++ code. It should not specify a", "__attribute__ ((unused));\\n' \\ '#else\\n' \\ ' PyObject* raw_locals;\\n' \\ '", "also. This code got a lot uglier when I added", "the auto_downcast and # type factories setting ext_func = inline_ext_function('compiled_func',code,arg_names,", "(_get_ndarray_c_version(),) # not an easy way for the user_path_list to", "passed through to distutils mod.compile(location=storage_dir,compiler=compiler, verbose=verbose, **kw) # import the", "return_val = Py_None; \\n\" \\ \" }\\n \\n\" \\ \"", "that the header strings need to be in a form", "parameters. \"\"\" # this grabs the local variables from the", "yet. 
msg = str(msg) if msg[:16] == \"Conversion Error\": pass", "= compile_function(code,arg_names,local_dict, global_dict,module_dir, compiler=compiler, verbose=verbose, support_code=support_code, headers=headers, customize=customize, type_converters=type_converters, auto_downcast=auto_downcast,", "way to specify `support_code`, `headers`, etc. needed by the function.", "against. runtime_library_dirs : [string] List of directories to search for", "{ \\n' \\ '#if defined(__GNUC__) || defined(__ICC)\\n' \\ ' PyObject*", "of library names (not filenames or paths) to link against.", "are passed into a standard Python function. Values are returned", "= 'template<class T>\\n' \\ 'static PyObject* %s(PyObject*self, PyObject* args)\\n{\\n' return", "' PyObject* raw_locals;\\n' \\ ' PyObject* raw_globals;\\n' \\ '#endif\\n' \\", "your having problems getting code to work. Its handy for", "', '.join(self.arg_specs.py_pointers()) if py_objects: declare_py_objects = 'PyObject ' + py_objects", "str(msg).strip() if msg[:16] == \"Conversion Error\": pass else: raise TypeError(msg)", "one of the arrays has type double or double complex,", "template_declaration_code(self): code = 'template<class T>\\n' \\ 'static PyObject* %s(PyObject*self, PyObject*", "C++ code is compiled every time inline is called. This", "provided. See Also -------- distutils.extension.Extension : Describes additional parameters. \"\"\"", "from a shared extension. Not used on all platforms, and", "customize=None, type_converters=None, auto_downcast=1, **kw): # figure out where to store", "function_catalog.add_function(code,func,module_dir) results = attempt_function_call(code,local_dict,global_dict) else: # 1. try local cache", "C/C++ data types. If you'd like to use a different", "not an easy way for the user_path_list to come in", "the arrays has type double or double complex, all variables", "source file has the .i extension. 
depends : [string] List", "attempt_function_call(code,local_dict,global_dict): # we try 3 levels here -- a local", "compiling the source files in 'sources'. For platforms and compilers", "' PyObject* raw_globals;\\n' \\ '#endif\\n' \\ ' raw_locals = py_to_raw_dict(py__locals,\"_locals\");\\n'", "indent(self.parse_tuple_code(),4) + \\ try_code + \\ indent(catch_code,4) + \\ return_code", "it understands 'msvc' and 'gcc' as well as all the", "that will contain the function. # storage_dir = catalog.intermediate_dir() code", "results = attempt_function_call(code,local_dict,global_dict) else: # 1. try local cache try:", "Python function. Values are returned from the C/C++ code through", "= \" /* cleanup code */ \\n\" + \\ cleanup_code", "use when compiling. On windows, it understands 'msvc' and 'gcc'", "if py_vars: init_values = py_vars + ' = NULL;\\n\\n' else:", "= function_catalog.get_functions_fast(code) for func in function_list: try: results = apply(func,(local_dict,global_dict))", "header in headers: mod.customize.add_header(header) # it's nice to let the", "import absolute_import, print_function import sys import os from . import", "a shared extension. Not used on all platforms, and not", "**kw) # import the module and return the function. Make", "your editing `support_code` a lot. compiler : str, optional The", "with msvc where it still prints some garbage). 1 informs", "customize # add the extra \"support code\" needed by the", "= py_to_raw_dict(py__locals,\"_locals\");\\n' \\ ' raw_globals = py_to_raw_dict(py__globals,\"_globals\");\\n' \\ ' /*", "if your editing `support_code` a lot. compiler : str, optional", "py_to_raw_dict(py__globals,\"_globals\");\\n' \\ ' /* argument conversion code */ \\n' \\", "similar. verbose : {0,1,2}, optional Specifies how much information is", "maintain their standard types. newarr_converter : int, optional Unused. 
Other", "function_catalog.get_functions_fast(code) for func in function_list: try: results = apply(func,(local_dict,global_dict)) function_catalog.fast_cache(code,func)", "\\ ' }\\n' catch_code = \"catch(...) \\n\" \\ \"{ \\n\"", "mod.add_function(ext_func) # if customize (a custom_info object), then set the", "'gcc' as well as all the compiler names understood by", "Setting this to 1 will cause all floating point values", "distutils extension modules are accepted to specify extra information needed", "the .cpp file if you need to examine it. verbose", "arg_strings = [arg.cleanup_code() for arg in self.arg_specs] return \"\".join(arg_strings) def", "**kw): # figure out where to store and what to", "local and global Python scope are also available in the", "This could be declarations of functions, classes, or structures. headers", "\"\"\" Create code block for PyArg_ParseTuple. Variable declarations for all", "== \"Conversion Error\": pass else: raise NameError(msg) except KeyError: pass", "compile_function(code,arg_names,local_dict,global_dict, module_dir, compiler='', verbose=1, support_code=None, headers=[], customize=None, type_converters=None, auto_downcast=1, **kw):", "code that might be needed by your compiled function. This", "\\n' \\ ' }\\n' catch_code = \"catch(...) \\n\" \\ \"{", "function. Values are returned from the C/C++ code through a", "= indent(self.arg_local_dict_code(),4) try_code = \\ ' try \\n' \\ '", "like to use a different set of type conversions than", "keywords are passed through to distutils mod.compile(location=storage_dir,compiler=compiler, verbose=verbose, **kw) #", "the distribution root (where the setup script lives), in Unix", "global_dict = call_frame.f_globals if force: module_dir = global_dict.get('__file__',None) func =", "# as input. 
from __future__ import absolute_import, print_function import sys", "{0, 1}, optional If 1, the C++ code is compiled", "# type factories setting ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) mod.add_function(ext_func)", "= '/* NDARRAY API VERSION %x */' % (_get_ndarray_c_version(),) #", "extra platform- and compiler-specific information to use when linking object", "is None: local_dict = call_frame.f_locals if global_dict is None: global_dict", "file has the .i extension. depends : [string] List of", "py_vars + ' = NULL;\\n\\n' else: init_values = '' parse_tuple", "\"\"\" Inline C/C++ code within Python scripts. ``inline()`` compiles and", "inline has quite a few options as listed below. Also,", ". import catalog from . import common_info from numpy.core.multiarray import", "self.arg_specs] return \"\".join(arg_strings) def function_code(self): from .ext_tools import indent decl_code", "``inline()`` compiles and executes C/C++ code on the fly. Variables", "options to pass to SWIG if a source file has", "distutils.extension.Extension : Describes additional parameters. \"\"\" # this grabs the", "= func return results except: # should specify argument types", "few options as listed below. Also, the keyword arguments for", "SWIG if a source file has the .i extension. depends", "the cleanup code as a string.\"\"\" arg_strings = [arg.cleanup_code() for", "for C/C++ libraries at run time (for shared extensions, this", "standard types. newarr_converter : int, optional Unused. Other Parameters ----------------", "like ``[\"<vector>\",\"'my_header'\"]``. Note that the header strings need to be", "PYTHONCOMPILED environment variable offers the most hope. 
function_catalog = catalog.catalog()", "= apply(func,(local_dict,global_dict)) function_catalog.fast_cache(code,func) function_cache[code] = func return results except: #", "' PyObject* raw_locals __attribute__ ((unused));\\n' \\ ' PyObject* raw_globals __attribute__", "functions to take a local and global dict # as", "parse_tuple def arg_declaration_code(self): \"\"\"Return the declaration code as a string.\"\"\"", "argument called return_val. Also, the contents of mutable objects can", "' return NULL;\\n' return declare_return + declare_py_objects + \\ init_values", "optional A list of Python variable names that should be", "to use when linking object files together to create the", "\\ \" return_val = py::object(); \\n\" \\ \" exception_occurred =", "headers: mod.customize.add_header(header) # it's nice to let the users know", "calling function is used. global_dict : dict, optional If specified,", "__init__(self,name,compiler=''): ext_tools.ext_module.__init__(self,name,compiler) self._build_information.append(common_info.inline_info()) function_cache = {} def inline(code,arg_names=[],local_dict=None, global_dict=None, force=0,", "necessary. support_code : str, optional A string of valid C++", "would like to fill in changed locals and globals here...*/", "exception_occurred = 1; \\n\" \\ \"} \\n\" return_code = \"", "and compiler-specific information to use when compiling the source files", "converters], optional These guys are what convert Python data types", "to use when compiling. On windows, it understands 'msvc' and", "be cast as float instead of double if all the", "editing `support_code` a lot. compiler : str, optional The name", "statement. Instead it should assign results that need to be", "import build_tools compiler = build_tools.choose_compiler(compiler) ext_func.set_compiler(compiler) return ext_func.function_code() def compile_function(code,arg_names,local_dict,global_dict,", "be explicitly specified, binary resource files, etc.) 
extra_compile_args : [string]", "function that called # inline. global function_catalog call_frame = sys._getframe().f_back", "\\ \" if(!(PyObject*)return_val && !exception_occurred)\\n\" \\ \" {\\n \\n\" \\", "raw_locals = py_to_raw_dict(py__locals,\"_locals\");\\n' \\ ' raw_globals = py_to_raw_dict(py__globals,\"_globals\");\\n' \\ '", "here. pass # if we get here, the function wasn't", "Describes additional parameters. \"\"\" # this grabs the local variables", "to define it to or None to define it without", "during the compile phase of inlining code. 0 is silent", "a ``#include`` statement in the C++ code. customize : base_info.custom_info,", "Will be detected from the source extensions if not provided.", "compile_function(code,arg_names,local_dict, global_dict,module_dir, compiler=compiler, verbose=verbose, support_code=support_code, headers=headers, customize=customize, type_converters=type_converters, auto_downcast=auto_downcast, **kw)", "[string] List of source filenames, relative to the distribution root", "List of source filenames, relative to the distribution root (where", "List of macros to define; each macro is defined using", "# it's nice to let the users know when anything", "this). On windows, the compiler defaults to the Microsoft C++", "directories to search for C/C++ libraries at run time (for", "defined(__ICC)\\n' \\ ' PyObject* raw_locals __attribute__ ((unused));\\n' \\ ' PyObject*", "look like ``[\"<vector>\",\"'my_header'\"]``. Note that the header strings need to", "from the C/C++ code through a special argument called return_val.", "the extra \"support code\" needed by the function to the", "(or to create a new static Python interpreter). Similar interpretation", "in the `return_val`. arg_names : [str], optional A list of", "else: # 1. 
try local cache try: results = apply(function_cache[code],(local_dict,global_dict))", "name the extension module # that will contain the function.", ".ext_tools import indent decl_code = indent(self.arg_declaration_code(),4) cleanup_code = indent(self.arg_cleanup_code(),4) function_code", "specify them here. Look in the type conversions section of", "storage_dir = catalog.intermediate_dir() code = ndarray_api_version + '\\n' + code", "\\ '#else\\n' \\ ' PyObject* raw_locals;\\n' \\ ' PyObject* raw_globals;\\n'", "List of directories to search for C/C++ libraries at link", "to be cast as float instead of double if all", "source files in 'sources'. For platforms and compilers where \"command", "though to this). On windows, the compiler defaults to the", "(for shared extensions, this is when the extension is loaded).", "to search for C/C++ libraries at run time (for shared", "the contents of mutable objects can be changed within the", "import os from . import ext_tools from . import catalog", "by assignment much like variables passed are passed into a", "Specifies how much information is printed during the compile phase", "# This should really have its own error type, instead", "needed for compiling. Parameters ---------- code : string A string", "+ ';\\n' else: declare_py_objects = '' py_vars = ' =", "[type converters], optional These guys are what convert Python data", ": [string] Any extra options to pass to SWIG if", "object), then set the module customization. if customize: mod.customize =", "def arg_cleanup_code(self): \"\"\"Return the cleanup code as a string.\"\"\" arg_strings", "the module. 
if support_code: mod.customize.add_support_code(support_code) # add the extra headers", "On Unix, it'll only understand the values understood by distutils.", "headers=headers, customize=customize, type_converters=type_converters, auto_downcast=auto_downcast, **kw) function_catalog.add_function(code,func,module_dir) results = attempt_function_call(code,local_dict,global_dict) else:", "export exactly one symbol: \"init\" + extension_name. swig_opts : [string]", "typically a list of command-line arguments, but for other platforms", "for portability. Source files may be C, C++, SWIG (.i),", "to use when compiling the source files in 'sources'. For", "of extra files to link with (e.g. object files not", "code. 0 is silent (except on windows with msvc where", "the compilation isn't necessary. support_code : str, optional A string", "as the # slowdown is very noticeable. if verbose >", "= Py_None; \\n\" \\ \" }\\n \\n\" \\ \" return", "local cache try: results = apply(function_cache[code],(local_dict,global_dict)) return results except TypeError", "= attempt_function_call(code,local_dict,global_dict) else: # 1. try local cache try: results", "effect if the compilation isn't necessary. support_code : str, optional", "float instead of double if all the Numeric arrays are", "call # frame -- that is the locals from the", "\\ \"{ \\n\" + \\ \" return_val = py::object(); \\n\"", "compiler that was used when compiling Python. Cygwin's behavior should", "(a custom_info object), then set the module customization. if customize:", "---------------- Relevant :mod:`distutils` keywords. These are duplicated from <NAME>'s :class:`distutils.extension.Extension`", "= 'if(!PyArg_ParseTuple(args,\"OO:compiled_func\",'\\ '&py__locals,'\\ '&py__globals))\\n'\\ ' return NULL;\\n' return declare_return +", "passed are passed into a standard Python function. Values are", "defined(__GNUC__) || defined(__ICC)\\n' \\ ' PyObject* raw_locals __attribute__ ((unused));\\n' \\", "# create the function. 
This relies on the auto_downcast and", "None: local_dict = call_frame.f_locals if global_dict is None: global_dict =", "List of files that the extension depends on. language :", "\\ + decl_code + \\ ' /* inline code */", "\\n\" return_code = \" /* cleanup code */ \\n\" +", "in the python path. try: sys.path.insert(0,storage_dir) exec('import ' + module_name)", "code through a special argument called return_val. Also, the contents", "informs you when compiling starts, finishes, and how long it", "else: declare_py_objects = '' py_vars = ' = '.join(self.arg_specs.py_variables()) if", "extra_objects : [string] List of extra files to link with", "inline extension functions def function_declaration_code(self): code = 'static PyObject* %s(PyObject*self,", ". import common_info from numpy.core.multiarray import _get_ndarray_c_version ndarray_api_version = '/*", "used. force : {0, 1}, optional If 1, the C++", ": str, optional A string of valid C++ code declaring", "{1,0}, optional This only affects functions that have numpy arrays", ": [string] List of library names (not filenames or paths)", "source or -DFOO on Unix C compiler command line). undef_macros", "= \\ ' try \\n' \\ ' { \\n' \\", "link with (e.g. object files not implied by 'sources', static", "the local dict as a string.\"\"\" arg_strings = [arg.local_dict_code() for", "sure this'll be used much). type_converters : [type converters], optional", "+ '\\n' + code module_path = function_catalog.unique_module_name(code, module_dir) storage_dir, module_name", "as input variables. Setting this to 1 will cause all", "= build_tools.choose_compiler(compiler) ext_func.set_compiler(compiler) return ext_func.function_code() def compile_function(code,arg_names,local_dict,global_dict, module_dir, compiler='', verbose=1,", "force=0, compiler='', verbose=0, support_code=None, headers=[], customize=None, type_converters=None, auto_downcast=1, newarr_converter=0, **kw):", "it still prints some garbage). 
1 informs you when compiling", "C++, SWIG (.i), platform-specific resource files, or whatever else is", "or None to define it without a particular value (equivalent", "string.\"\"\" arg_strings = [arg.declaration_code(inline=1) for arg in self.arg_specs] return \"\".join(arg_strings)", "library module_dir = global_dict.get('__file__',None) func = compile_function(code,arg_names,local_dict, global_dict,module_dir, compiler=compiler, verbose=verbose,", "not generally necessary for Python extensions, which typically export exactly", "parse_tuple_code(self): \"\"\" Create code block for PyArg_ParseTuple. Variable declarations for", "the C/C++ code. If `global_dict` is not specified, the global", "and can be useful if your having problems getting code", "global_dict = call_frame.f_globals ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) from .", "the function. # storage_dir = catalog.intermediate_dir() code = ndarray_api_version +", "used. global_dict : dict, optional If specified, it is a", "ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) mod.add_function(ext_func) # if customize (a", ": {0, 1}, optional If 1, the C++ code is", "globals here...*/ \\n' \\ ' }\\n' catch_code = \"catch(...) \\n\"", "\\n\" \\ \"{ \\n\" + \\ \" return_val = py::object();", "auto_downcast=auto_downcast, **kw) function_catalog.add_function(code,func,module_dir) results = attempt_function_call(code,local_dict,global_dict) else: # 1. try", "platforms, and not generally necessary for Python extensions, which typically", "all_code = self.function_declaration_code() + \\ indent(self.parse_tuple_code(),4) + \\ try_code +", "of macros to undefine explicitly. library_dirs : [string] List of", "compiling Python. Cygwin's behavior should be similar. verbose : {0,1,2},", "prints some garbage). 
1 informs you when compiling starts, finishes,", "could be declarations of functions, classes, or structures. headers :", "\\ ' /* argument conversion code */ \\n' \\ +", "as all the compiler names understood by distutils. On Unix,", "variables maintain their standard types. newarr_converter : int, optional Unused.", "indent(self.code_block,4) # local_dict_code = indent(self.arg_local_dict_code(),4) try_code = \\ ' try", "conversions section of the main documentation for examples. auto_downcast :", "function_list = function_catalog.get_functions(code,module_dir) for func in function_list: try: results =", "is defined using a 2-tuple, where 'value' is either the", "string, value : string|None)] List of macros to define; each", "dictionary of values that should be used as the local", "= 0;\\n' \\ 'PyObject *py__locals = NULL;\\n' \\ 'PyObject *py__globals", "\\ ' PyObject* raw_locals __attribute__ ((unused));\\n' \\ ' PyObject* raw_globals", "be in a form than can be pasted at the", "compile code in correct location, with the given compiler and", "of the calling function is used. force : {0, 1},", "\\ \" }\\n \\n\" \\ \" return return_val.disown(); \\n\" \\", "except KeyError: pass # 2. try function catalog try: results", "the local variables from the *previous* call # frame --", "function. See :mod:`scipy.weave.base_info` for more details. (not sure this'll be", "the # slowdown is very noticeable. if verbose > 0:", "'#endif\\n' \\ ' raw_locals = py_to_raw_dict(py__locals,\"_locals\");\\n' \\ ' raw_globals =", "where \"command line\" makes sense, this is typically a list", "self._build_information.append(common_info.inline_info()) function_cache = {} def inline(code,arg_names=[],local_dict=None, global_dict=None, force=0, compiler='', verbose=0,", "floating point values to be cast as float instead of", "will contain the function. 
# storage_dir = catalog.intermediate_dir() code =", "\\n\" \\ \"} \\n\" all_code = self.function_declaration_code() + \\ indent(self.parse_tuple_code(),4)", "optional A string of valid C++ code declaring extra code", "create the function. This relies on the auto_downcast and #", "return \"\".join(arg_strings) def function_code(self): from .ext_tools import indent decl_code =", "optional These guys are what convert Python data types to", "code module_path = function_catalog.unique_module_name(code, module_dir) storage_dir, module_name = os.path.split(module_path) mod", "extension. Not used on all platforms, and not generally necessary", "the C/C++ code. If local_dict is not specified the local", "here. # the PYTHONCOMPILED environment variable offers the most hope.", "declaration code as a string.\"\"\" arg_strings = [arg.declaration_code(inline=1) for arg", "\\ \"} \\n\" all_code = self.function_declaration_code() + \\ indent(self.parse_tuple_code(),4) +", "compiler and verbosity # setting. All input keywords are passed", "and the changes remain after the C code exits and", "compiler defaults to the Microsoft C++ compiler. If this isn't", "' try \\n' \\ ' { \\n' \\ '#if defined(__GNUC__)", "*/' % (_get_ndarray_c_version(),) # not an easy way for the", "specify extra information needed for compiling. Parameters ---------- code :", "\" }\\n \\n\" \\ \" return return_val.disown(); \\n\" \\ \"}", "# Some specialization is needed for inline extension functions def", "as a string.\"\"\" arg_strings = [arg.local_dict_code() for arg in self.arg_specs]", "nice to let the users know when anything gets compiled,", "headers=[], customize=None, type_converters=None, auto_downcast=1, **kw): # figure out where to", "results except TypeError as msg: # should specify argument types", "verbosity # setting. 
All input keywords are passed through to", "code = 'static PyObject* %s(PyObject*self, PyObject* args)\\n{\\n' return code %", "alternative way to specify `support_code`, `headers`, etc. needed by the", "useful if your editing `support_code` a lot. compiler : str,", "code % self.name def parse_tuple_code(self): \"\"\" Create code block for", "C++ code. It should not specify a return statement. Instead", "self.name def parse_tuple_code(self): \"\"\" Create code block for PyArg_ParseTuple. Variable", "A string of valid C++ code. It should not specify", "implied by 'sources', static libraries that must be explicitly specified,", "it should assign results that need to be returned to", "import indent decl_code = indent(self.arg_declaration_code(),4) cleanup_code = indent(self.arg_cleanup_code(),4) function_code =", "[string] List of extra files to link with (e.g. object", "Instead it should assign results that need to be returned", "`headers`, etc. needed by the function. See :mod:`scipy.weave.base_info` for more", "This relies on the auto_downcast and # type factories setting", "locals from the function that called # inline. global function_catalog", "py_vars: init_values = py_vars + ' = NULL;\\n\\n' else: init_values", "should be used as the local scope for the C/C++", "to create a new static Python interpreter). Similar interpretation as", "\" return_val = Py_None; \\n\" \\ \" }\\n \\n\" \\", "# the PYTHONCOMPILED environment variable offers the most hope. function_catalog", "declare_py_objects = 'PyObject ' + py_objects + ';\\n' else: declare_py_objects", "sys.path.insert(0,storage_dir) exec('import ' + module_name) func = eval(module_name+'.compiled_func') finally: del", "list might look like ``[\"<vector>\",\"'my_header'\"]``. Note that the header strings", "print_function import sys import os from . import ext_tools from", "easy way for the user_path_list to come in here. 
#", ": [string] List of symbols to be exported from a", "point values to be cast as float instead of double", "in 'sources'. For platforms and compilers where \"command line\" makes", "all variables maintain their standard types. newarr_converter : int, optional", "type_converters : [type converters], optional These guys are what convert", "of type conversions than the default, specify them here. Look", "the user_path_list to come in here. # the PYTHONCOMPILED environment", "that should be transferred from Python into the C/C++ code.", "[string] List of directories to search for C/C++ libraries at", "int, optional Unused. Other Parameters ---------------- Relevant :mod:`distutils` keywords. These", "if a source file has the .i extension. depends :", "customize: mod.customize = customize # add the extra \"support code\"", "\"\"\" declare_return = 'py::object return_val;\\n' \\ 'int exception_occurred = 0;\\n'", "' raw_locals = py_to_raw_dict(py__locals,\"_locals\");\\n' \\ ' raw_globals = py_to_raw_dict(py__globals,\"_globals\");\\n' \\", "specify `support_code`, `headers`, etc. needed by the function. See :mod:`scipy.weave.base_info`", "function catalog try: results = attempt_function_call(code,local_dict,global_dict) # 3. build the", "are returned from the C/C++ code through a special argument", "call_frame.f_locals if global_dict is None: global_dict = call_frame.f_globals ext_func =", "interpreter). Similar interpretation as for 'extra_compile_args'. export_symbols : [string] List", "C/C++ code and the changes remain after the C code", "auto_downcast=1, **kw): # figure out where to store and what", "C++ compiler. If this isn't available, it looks for mingw32", "+ \\ indent(catch_code,4) + \\ return_code return all_code def python_function_definition_code(self):", "code to create the local dict as a string.\"\"\" arg_strings", "Unix, it'll probably use the same compiler that was used", "libraries at link time. 
libraries : [string] List of library", "# we try 3 levels here -- a local cache", "sys import os from . import ext_tools from . import", ": [type converters], optional These guys are what convert Python", "a dictionary of values that should be used as the", "are what convert Python data types to C/C++ data types.", "etc. needed by the function. See :mod:`scipy.weave.base_info` for more details.", "note:: The `module_path` file is always appended to the front", "can be changed within the C/C++ code and the changes", "None to define it without a particular value (equivalent of", "information to use when linking object files together to create", "other platforms it could be anything. extra_link_args : [string] Any", "should really have its own error type, instead of #", "args)\\n{\\n' return code % self.name def template_declaration_code(self): code = 'template<class", "global_dict,module_dir, compiler=compiler, verbose=verbose, support_code=support_code, headers=headers, customize=customize, type_converters=type_converters, auto_downcast=auto_downcast, **kw) function_catalog.add_function(code,func,module_dir)", "# should specify argument types here. pass # if we", "files, etc.) extra_compile_args : [string] Any extra platform- and compiler-specific", "be C, C++, SWIG (.i), platform-specific resource files, or whatever", "local dict as a string.\"\"\" arg_strings = [arg.local_dict_code() for arg", "\\ 'static PyObject* %s(PyObject*self, PyObject* args)\\n{\\n' return code % self.name", "after the C code exits and returns to Python. inline", "form (slash-separated) for portability. Source files may be C, C++,", "to let the users know when anything gets compiled, as", "code : string A string of valid C++ code. It", "specifying header files to use when compiling the code. The", "\"Conversion Error\": pass else: raise TypeError(msg) except NameError as msg:", ".. note:: The `module_path` file is always appended to the", "should add 'gcc' though to this). 
On windows, the compiler", "= catalog.catalog() class inline_ext_function(ext_tools.ext_function): # Some specialization is needed for", "indent(self.arg_cleanup_code(),4) function_code = indent(self.code_block,4) # local_dict_code = indent(self.arg_local_dict_code(),4) try_code =", "= call_frame.f_globals if force: module_dir = global_dict.get('__file__',None) func = compile_function(code,arg_names,local_dict,", "local_dict_code = indent(self.arg_local_dict_code(),4) try_code = \\ ' try \\n' \\", "than can be pasted at the end of a ``#include``", "'/* NDARRAY API VERSION %x */' % (_get_ndarray_c_version(),) # not", "a new static Python interpreter). Similar interpretation as for 'extra_compile_args'.", "lines for the compilation process and can be useful if", "inline code */ \\n' \\ + function_code + \\ '", "the C++ code. customize : base_info.custom_info, optional An alternative way", ": int, optional Unused. Other Parameters ---------------- Relevant :mod:`distutils` keywords.", "catalog. # global function_catalog # 1. try local cache try:", "\\n' \\ '#if defined(__GNUC__) || defined(__ICC)\\n' \\ ' PyObject* raw_locals", "search for C/C++ libraries at link time. libraries : [string]", "the function wasn't found raise ValueError('function with correct signature not", "always appended to the front of this list include_dirs :", "'sources', static libraries that must be explicitly specified, binary resource", "the source files in 'sources'. For platforms and compilers where", "if force: module_dir = global_dict.get('__file__',None) func = compile_function(code,arg_names,local_dict, global_dict,module_dir, compiler=compiler,", "a source file has the .i extension. depends : [string]", "except KeyError: pass # 2. try catalog cache. function_list =", "command lines for the compilation process and can be useful", "to this). On windows, the compiler defaults to the Microsoft", "of the calling function is used. 
global_dict : dict, optional", "found') def inline_function_code(code,arg_names,local_dict=None, global_dict=None,auto_downcast=1, type_converters=None,compiler=''): call_frame = sys._getframe().f_back if local_dict", "printed during the compile phase of inlining code. 0 is", "return_code return all_code def python_function_definition_code(self): args = (self.name, self.name) function_decls", "C/C++ code. If `global_dict` is not specified, the global dictionary", "file if you need to examine it. verbose has no", "variables. Setting this to 1 will cause all floating point", "base_info.custom_info, optional An alternative way to specify `support_code`, `headers`, etc.", "this'll be used much). type_converters : [type converters], optional These", "ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) from . import build_tools compiler", "1; \\n\" \\ \"} \\n\" return_code = \" /* cleanup", "and what to name the extension module # that will", "Python in the `return_val`. arg_names : [str], optional A list", "optional An alternative way to specify `support_code`, `headers`, etc. needed", "their standard types. newarr_converter : int, optional Unused. Other Parameters", "static Python interpreter). Similar interpretation as for 'extra_compile_args'. export_symbols :", "available, it looks for mingw32 (the gcc compiler). On Unix,", "the string to define it to or None to define", "file is always appended to the front of this list", "be changed within the C/C++ code and the changes remain", "understood by distutils. On Unix, it'll only understand the values", "compiler. 
If this isn't available, it looks for mingw32 (the", "\\ 'PyObject *py__locals = NULL;\\n' \\ 'PyObject *py__globals = NULL;\\n'", "string.\"\"\" arg_strings = [arg.cleanup_code() for arg in self.arg_specs] return \"\".join(arg_strings)", "local_dict,global_dict,auto_downcast, type_converters=type_converters) mod.add_function(ext_func) # if customize (a custom_info object), then", "declare_py_objects + \\ init_values + parse_tuple def arg_declaration_code(self): \"\"\"Return the", "func = compile_function(code,arg_names,local_dict, global_dict,module_dir, compiler=compiler, verbose=verbose, support_code=support_code, headers=headers, customize=customize, type_converters=type_converters,", ". import ext_tools from . import catalog from . import", "args)\\n{\\n' return code % self.name def parse_tuple_code(self): \"\"\" Create code", "it. verbose has no effect if the compilation isn't necessary.", "debugging, and probably only useful if your editing `support_code` a", "'gcc' though to this). On windows, the compiler defaults to", "let the users know when anything gets compiled, as the", "create a new static Python interpreter). Similar interpretation as for", "keywords. These are duplicated from <NAME>'s :class:`distutils.extension.Extension` class for convenience:", "by 'sources', static libraries that must be explicitly specified, binary", "init_values + parse_tuple def arg_declaration_code(self): \"\"\"Return the declaration code as", "resource files, etc.) extra_compile_args : [string] Any extra platform- and", "compiling the code. The list might look like ``[\"<vector>\",\"'my_header'\"]``. Note", "numpy arrays as input variables. Setting this to 1 will", "as msg: msg = str(msg).strip() if msg[:16] == \"Conversion Error\":", "be anything. extra_link_args : [string] Any extra platform- and compiler-specific", "{0,1,2}, optional Specifies how much information is printed during the", "used as the local scope for the C/C++ code. 
If", "except TypeError as msg: msg = str(msg).strip() if msg[:16] ==", "form than can be pasted at the end of a", "useful if your having problems getting code to work. Its", "are duplicated from <NAME>'s :class:`distutils.extension.Extension` class for convenience: sources :", "'extra_compile_args'. export_symbols : [string] List of symbols to be exported", "C/C++ libraries at run time (for shared extensions, this is", "depends on. language : string Extension language (i.e. \"c\", \"c++\",", "lot uglier when I added local_dict... \"\"\" declare_return = 'py::object", "as input. from __future__ import absolute_import, print_function import sys import", "On windows, it understands 'msvc' and 'gcc' as well as", "compiler names understood by distutils. On Unix, it'll only understand", "for Python extensions, which typically export exactly one symbol: \"init\"", "+ \\ return_code return all_code def python_function_definition_code(self): args = (self.name,", "apply(func,(local_dict,global_dict)) function_catalog.fast_cache(code,func) function_cache[code] = func return results except: # should", "of the .cpp file if you need to examine it.", "use when linking object files together to create the extension", ":class:`distutils.extension.Extension` class for convenience: sources : [string] List of source", "this is typically a list of command-line arguments, but for", "code as a string.\"\"\" arg_strings = [arg.cleanup_code() for arg in", "out the command lines for the compilation process and can", "for compiling. Parameters ---------- code : string A string of", "Error\": pass else: raise NameError(msg) except KeyError: pass # 2.", "from the source extensions if not provided. See Also --------", "still prints some garbage). 
1 informs you when compiling starts,", "= apply(function_cache[code],(local_dict,global_dict)) return results except TypeError as msg: msg =", "in a form than can be pasted at the end", "(equivalent of \"#define FOO\" in source or -DFOO on Unix", "Python data types to C/C++ data types. If you'd like", "= py_vars + ' = NULL;\\n\\n' else: init_values = ''", "verbose=verbose, **kw) # import the module and return the function.", "to specify extra information needed for compiling. Parameters ---------- code", "Python variable names that should be transferred from Python into", "persistent catalog. # global function_catalog # 1. try local cache", "python_function_definition_code(self): args = (self.name, self.name) function_decls = '{\"%s\",(PyCFunction)%s , METH_VARARGS},\\n'", "on the auto_downcast and # type factories setting ext_func =", "valid C++ code. It should not specify a return statement.", "compiler=compiler, verbose=verbose, support_code=support_code, headers=headers, customize=customize, type_converters=type_converters, auto_downcast=auto_downcast, **kw) function_catalog.add_function(code,func,module_dir) results", "parse_tuple = 'if(!PyArg_ParseTuple(args,\"OO:compiled_func\",'\\ '&py__locals,'\\ '&py__globals))\\n'\\ ' return NULL;\\n' return declare_return", ". import build_tools compiler = build_tools.choose_compiler(compiler) ext_func.set_compiler(compiler) return ext_func.function_code() def", "[string] List of files that the extension depends on. language", "arg_strings = [arg.declaration_code(inline=1) for arg in self.arg_specs] return \"\".join(arg_strings) def", "call_frame.f_globals ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) from . 
import build_tools", "object files together to create the extension (or to create", "NDARRAY API VERSION %x */' % (_get_ndarray_c_version(),) # not an", "= apply(func,(local_dict,global_dict)) function_catalog.fast_cache(code,func) function_cache[code] = func return results except TypeError", "extensions, which typically export exactly one symbol: \"init\" + extension_name.", "if msg[:16] == \"Conversion Error\": pass else: raise NameError(msg) except", "from . import ext_tools from . import catalog from .", "for the user_path_list to come in here. # the PYTHONCOMPILED", "code declaring extra code that might be needed by your", "define that yet. msg = str(msg) if msg[:16] == \"Conversion", "as for 'extra_compile_args'. export_symbols : [string] List of symbols to", "\"\"\"Return the code to create the local dict as a", "self.arg_specs] return \"\".join(arg_strings) def arg_cleanup_code(self): \"\"\"Return the cleanup code as", "figure out where to store and what to name the", "a special argument called return_val. Also, the contents of mutable", "-------- distutils.extension.Extension : Describes additional parameters. \"\"\" # this grabs", "the local scope for the C/C++ code. If local_dict is", "where it still prints some garbage). 1 informs you when", "calling function is used. force : {0, 1}, optional If", "a form than can be pasted at the end of", ", METH_VARARGS},\\n' % args return function_decls class inline_ext_module(ext_tools.ext_module): def __init__(self,name,compiler=''):", "factories setting ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) mod.add_function(ext_func) # if", "newarr_converter=0, **kw): \"\"\" Inline C/C++ code within Python scripts. 
``inline()``", "' + module_name) func = eval(module_name+'.compiled_func') finally: del sys.path[0] return", "== \"Conversion Error\": pass else: raise TypeError(msg) except NameError as", "have numpy arrays as input variables. Setting this to 1", "1. try local cache try: results = apply(function_cache[code],(local_dict,global_dict)) return results", "the header strings need to be in a form than", "# should re-write compiled functions to take a local and", "% self.name def parse_tuple_code(self): \"\"\" Create code block for PyArg_ParseTuple.", "returned to Python in the `return_val`. arg_names : [str], optional", "function. This could be declarations of functions, classes, or structures.", "of strings specifying header files to use when compiling the", "local variables from the *previous* call # frame -- that", "[string] List of directories to search for C/C++ header files", "raw_locals;\\n' \\ ' PyObject* raw_globals;\\n' \\ '#endif\\n' \\ ' raw_locals", "should be similar. verbose : {0,1,2}, optional Specifies how much", "windows, the compiler defaults to the Microsoft C++ compiler. If", "include_dirs : [string] List of directories to search for C/C++", "function_code = indent(self.code_block,4) # local_dict_code = indent(self.arg_local_dict_code(),4) try_code = \\", "a Python extension. .. note:: The `module_path` file is always", "Unix C compiler command line). undef_macros : [string] List of", "See Also -------- distutils.extension.Extension : Describes additional parameters. \"\"\" #", "the end of a ``#include`` statement in the C++ code.", "else: init_values = '' parse_tuple = 'if(!PyArg_ParseTuple(args,\"OO:compiled_func\",'\\ '&py__locals,'\\ '&py__globals))\\n'\\ '", "}\\n' catch_code = \"catch(...) \\n\" \\ \"{ \\n\" + \\", "string to define it to or None to define it", "C/C++ code on the fly. 
Variables in the local and", "as float instead of double if all the Numeric arrays", "generally necessary for Python extensions, which typically export exactly one", "__attribute__ ((unused));\\n' \\ ' PyObject* raw_globals __attribute__ ((unused));\\n' \\ '#else\\n'", "return declare_return + declare_py_objects + \\ init_values + parse_tuple def", "etc.) extra_compile_args : [string] Any extra platform- and compiler-specific information", "of \"#define FOO\" in source or -DFOO on Unix C", "to distutils mod.compile(location=storage_dir,compiler=compiler, verbose=verbose, **kw) # import the module and", "Unix, it'll only understand the values understood by distutils. (I", "It defaults to an empty string. local_dict : dict, optional", "can be useful if your having problems getting code to", "METH_VARARGS},\\n' % args return function_decls class inline_ext_module(ext_tools.ext_module): def __init__(self,name,compiler=''): ext_tools.ext_module.__init__(self,name,compiler)", "as msg: # should specify argument types here. # This", "def parse_tuple_code(self): \"\"\" Create code block for PyArg_ParseTuple. Variable declarations", "a list of command-line arguments, but for other platforms it", "for debugging, and probably only useful if your editing `support_code`", "extension. .. note:: The `module_path` file is always appended to", "code. Values are passed to the C/C++ code by assignment", "-DFOO on Unix C compiler command line). undef_macros : [string]", "C code exits and returns to Python. inline has quite", "of the main documentation for examples. auto_downcast : {1,0}, optional", "try: results = attempt_function_call(code,local_dict,global_dict) # 3. 
build the function except", "and 'gcc' as well as all the compiler names understood", "str(msg) if msg[:16] == \"Conversion Error\": pass else: raise TypeError(msg)", "results = apply(func,(local_dict,global_dict)) function_catalog.fast_cache(code,func) function_cache[code] = func return results except", "= '' py_vars = ' = '.join(self.arg_specs.py_variables()) if py_vars: init_values", "to or None to define it without a particular value", "the C/C++ code by assignment much like variables passed are", "is printed during the compile phase of inlining code. 0", "\"c\", \"c++\", \"objc\"). Will be detected from the source extensions", "setting ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) mod.add_function(ext_func) # if customize", "way for the user_path_list to come in here. # the", "take a local and global dict # as input. from", "return_val = py::object(); \\n\" \\ \" exception_occurred = 1; \\n\"", "compiler). On Unix, it'll probably use the same compiler that", "pass else: raise NameError(msg) except KeyError: pass # 2. try", "for C/C++ header files (in Unix form for portability). define_macros", "of directories to search for C/C++ libraries at run time", "to search for C/C++ libraries at link time. libraries :", "def attempt_function_call(code,local_dict,global_dict): # we try 3 levels here -- a", "raw_locals __attribute__ ((unused));\\n' \\ ' PyObject* raw_globals __attribute__ ((unused));\\n' \\", "to use a different set of type conversions than the", "catalog cache. function_list = function_catalog.get_functions_fast(code) for func in function_list: try:", "specified, the global dictionary of the calling function is used.", "NULL;\\n' \\ 'PyObject *py__globals = NULL;\\n' py_objects = ', '.join(self.arg_specs.py_pointers())", "be exported from a shared extension. 
Not used on all", "the extension (or to create a new static Python interpreter).", "type_converters=None,compiler=''): call_frame = sys._getframe().f_back if local_dict is None: local_dict =", "`support_code`, `headers`, etc. needed by the function. See :mod:`scipy.weave.base_info` for", "of valid C++ code declaring extra code that might be", "it to or None to define it without a particular", "use when compiling the source files in 'sources'. For platforms", "Extension language (i.e. \"c\", \"c++\", \"objc\"). Will be detected from", "declarations of functions, classes, or structures. headers : [str], optional", "build_tools.choose_compiler(compiler) ext_func.set_compiler(compiler) return ext_func.function_code() def compile_function(code,arg_names,local_dict,global_dict, module_dir, compiler='', verbose=1, support_code=None,", "# add the extra \"support code\" needed by the function", "compilation isn't necessary. support_code : str, optional A string of", "str, optional A string of valid C++ code declaring extra", "os from . import ext_tools from . import catalog from", "local_dict,global_dict,auto_downcast, type_converters=type_converters) from . import build_tools compiler = build_tools.choose_compiler(compiler) ext_func.set_compiler(compiler)", "\\n\" all_code = self.function_declaration_code() + \\ indent(self.parse_tuple_code(),4) + \\ try_code", "If you'd like to use a different set of type", "really have its own error type, instead of # checking", "names understood by distutils. On Unix, it'll only understand the", "try persistent catalog module_dir = global_dict.get('__file__',None) function_list = function_catalog.get_functions(code,module_dir) for", "API VERSION %x */' % (_get_ndarray_c_version(),) # not an easy", "the C/C++ code and the changes remain after the C", "See :mod:`scipy.weave.base_info` for more details. (not sure this'll be used", "extension is loaded). 
extra_objects : [string] List of extra files", "# if customize (a custom_info object), then set the module", "inline_function_code(code,arg_names,local_dict=None, global_dict=None,auto_downcast=1, type_converters=None,compiler=''): call_frame = sys._getframe().f_back if local_dict is None:", "code block for PyArg_ParseTuple. Variable declarations for all PyObjects are", "C compiler command line). undef_macros : [string] List of macros", "are passed to the C/C++ code by assignment much like", "C/C++ header files (in Unix form for portability). define_macros :", "the setup script lives), in Unix form (slash-separated) for portability.", "that must be explicitly specified, binary resource files, etc.) extra_compile_args", "use a different set of type conversions than the default,", "finishes, and how long it took. 2 prints out the", "the compiler names understood by distutils. On Unix, it'll only", "\"\"\"Return the declaration code as a string.\"\"\" arg_strings = [arg.declaration_code(inline=1)", "extensions if not provided. See Also -------- distutils.extension.Extension : Describes", "def inline_function_code(code,arg_names,local_dict=None, global_dict=None,auto_downcast=1, type_converters=None,compiler=''): call_frame = sys._getframe().f_back if local_dict is", "_get_ndarray_c_version ndarray_api_version = '/* NDARRAY API VERSION %x */' %", "from the function that called # inline. 
global function_catalog call_frame", "%x */' % (_get_ndarray_c_version(),) # not an easy way for", "'' parse_tuple = 'if(!PyArg_ParseTuple(args,\"OO:compiled_func\",'\\ '&py__locals,'\\ '&py__globals))\\n'\\ ' return NULL;\\n' return", "Not used on all platforms, and not generally necessary for", "\\ init_values + parse_tuple def arg_declaration_code(self): \"\"\"Return the declaration code", "from <NAME>'s :class:`distutils.extension.Extension` class for convenience: sources : [string] List", "# frame -- that is the locals from the function", "ext_func.set_compiler(compiler) return ext_func.function_code() def compile_function(code,arg_names,local_dict,global_dict, module_dir, compiler='', verbose=1, support_code=None, headers=[],", "list include_dirs : [string] List of directories to search for", "(the gcc compiler). On Unix, it'll probably use the same", "cache, and then persistent catalog. # global function_catalog # 1.", "results except: # should specify argument types here. pass #", "return return_val.disown(); \\n\" \\ \"} \\n\" all_code = self.function_declaration_code() +", "will cause all floating point values to be cast as", "TypeError(msg) except NameError as msg: msg = str(msg).strip() if msg[:16]", "as source for a Python extension. .. note:: The `module_path`", "for finding the name of the .cpp file if you", "search for C/C++ header files (in Unix form for portability).", "by the \"build_ext\" command as source for a Python extension.", "library names (not filenames or paths) to link against. runtime_library_dirs", "KeyError: pass # 2. try catalog cache. function_list = function_catalog.get_functions_fast(code)", "beginning of the message, but I don't know # how", "specified, binary resource files, etc.) extra_compile_args : [string] Any extra", "support_code: mod.customize.add_support_code(support_code) # add the extra headers needed by the", "module # that will contain the function. 
# storage_dir =", "input keywords are passed through to distutils mod.compile(location=storage_dir,compiler=compiler, verbose=verbose, **kw)", "anything gets compiled, as the # slowdown is very noticeable.", "self.name) function_decls = '{\"%s\",(PyCFunction)%s , METH_VARARGS},\\n' % args return function_decls", "users know when anything gets compiled, as the # slowdown", "verbose=0, support_code=None, headers=[], customize=None, type_converters=None, auto_downcast=1, newarr_converter=0, **kw): \"\"\" Inline", "cleanup code as a string.\"\"\" arg_strings = [arg.cleanup_code() for arg", "code. It defaults to an empty string. local_dict : dict,", "extra_link_args : [string] Any extra platform- and compiler-specific information to", "compile phase of inlining code. 0 is silent (except on", "indent(self.arg_declaration_code(),4) cleanup_code = indent(self.arg_cleanup_code(),4) function_code = indent(self.code_block,4) # local_dict_code =", "On windows, the compiler defaults to the Microsoft C++ compiler.", "NameError as msg: msg = str(msg).strip() if msg[:16] == \"Conversion", "conversions than the default, specify them here. Look in the", "detected from the source extensions if not provided. See Also", "Other Parameters ---------------- Relevant :mod:`distutils` keywords. These are duplicated from", "the .i extension. depends : [string] List of files that", "anything. extra_link_args : [string] Any extra platform- and compiler-specific information", "function_catalog.get_functions(code,module_dir) for func in function_list: try: results = apply(func,(local_dict,global_dict)) function_catalog.fast_cache(code,func)", "names (not filenames or paths) to link against. runtime_library_dirs :", "know when anything gets compiled, as the # slowdown is", "as the global scope for the C/C++ code. If `global_dict`", "+ \\ indent(self.parse_tuple_code(),4) + \\ try_code + \\ indent(catch_code,4) +", "should specify argument types here. 
# This should really have", "at the end of a ``#include`` statement in the C++", "and compilers where \"command line\" makes sense, this is typically", "arg_declaration_code(self): \"\"\"Return the declaration code as a string.\"\"\" arg_strings =", "PyObjects are done also. This code got a lot uglier", "function_catalog.fast_cache(code,func) function_cache[code] = func return results except TypeError as msg:", "isn't necessary. support_code : str, optional A string of valid", "\\ ' PyObject* raw_globals;\\n' \\ '#endif\\n' \\ ' raw_locals =", "1}, optional If 1, the C++ code is compiled every", "global dict # as input. from __future__ import absolute_import, print_function", "if local_dict is None: local_dict = call_frame.f_locals if global_dict is", "'.join(self.arg_specs.py_variables()) if py_vars: init_values = py_vars + ' = NULL;\\n\\n'", "header strings need to be in a form than can", "return_val.disown(); \\n\" \\ \"} \\n\" all_code = self.function_declaration_code() + \\", "pass # if we get here, the function wasn't found", "when compiling the code. The list might look like ``[\"<vector>\",\"'my_header'\"]``.", "KeyError: pass # 2. try function catalog try: results =", "with correct signature not found') def inline_function_code(code,arg_names,local_dict=None, global_dict=None,auto_downcast=1, type_converters=None,compiler=''): call_frame", "inline. global function_catalog call_frame = sys._getframe().f_back if local_dict is None:", "cause all floating point values to be cast as float", "is a dictionary of values that should be used as", "*/ \\n' \\ + decl_code + \\ ' /* inline", "specified the local dictionary of the calling function is used.", "class inline_ext_function(ext_tools.ext_function): # Some specialization is needed for inline extension", "when compiling the source files in 'sources'. For platforms and", "to link with (e.g. 
object files not implied by 'sources',", "args return function_decls class inline_ext_module(ext_tools.ext_module): def __init__(self,name,compiler=''): ext_tools.ext_module.__init__(self,name,compiler) self._build_information.append(common_info.inline_info()) function_cache", "to take a local and global dict # as input.", "be used as the local scope for the C/C++ code.", "= '' parse_tuple = 'if(!PyArg_ParseTuple(args,\"OO:compiled_func\",'\\ '&py__locals,'\\ '&py__globals))\\n'\\ ' return NULL;\\n'", "NameError(msg) # 3. try persistent catalog module_dir = global_dict.get('__file__',None) function_list", "call_frame.f_globals if force: module_dir = global_dict.get('__file__',None) func = compile_function(code,arg_names,local_dict, global_dict,module_dir,", "the function. This relies on the auto_downcast and # type", "examine it. verbose has no effect if the compilation isn't", "' /* argument conversion code */ \\n' \\ + decl_code", "global_dict.get('__file__',None) func = compile_function(code,arg_names,local_dict, global_dict,module_dir, compiler=compiler, verbose=verbose, support_code=support_code, headers=headers, customize=customize,", "Error\": pass else: raise NameError(msg) # 3. try persistent catalog", "str, optional The name of compiler to use when compiling.", "fly. Variables in the local and global Python scope are", "the function. See :mod:`scipy.weave.base_info` for more details. (not sure this'll", "Python scripts. ``inline()`` compiles and executes C/C++ code on the", "+ extension_name. swig_opts : [string] Any extra options to pass", "compiler command line). undef_macros : [string] List of macros to", "but for other platforms it could be anything. extra_link_args :", "*py__locals = NULL;\\n' \\ 'PyObject *py__globals = NULL;\\n' py_objects =", "the beginning of the message, but I don't know #", "extension module # that will contain the function. 
# storage_dir", "/* argument conversion code */ \\n' \\ + decl_code +", "type conversions section of the main documentation for examples. auto_downcast", "type factories setting ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) mod.add_function(ext_func) #", "compilers where \"command line\" makes sense, this is typically a", "it took. 2 prints out the command lines for the", "this grabs the local variables from the *previous* call #", "\\ \" {\\n \\n\" \\ \" return_val = Py_None; \\n\"", "signature not found') def inline_function_code(code,arg_names,local_dict=None, global_dict=None,auto_downcast=1, type_converters=None,compiler=''): call_frame = sys._getframe().f_back", "and how long it took. 2 prints out the command", "names that should be transferred from Python into the C/C++", "which typically export exactly one symbol: \"init\" + extension_name. swig_opts", "of values that should be used as the global scope", "C++ code declaring extra code that might be needed by", "os.path.split(module_path) mod = inline_ext_module(module_name,compiler) # create the function. This relies", "dictionary of the calling function is used. force : {0,", "Parameters ---------------- Relevant :mod:`distutils` keywords. These are duplicated from <NAME>'s", "has type double or double complex, all variables maintain their", "the extension depends on. language : string Extension language (i.e.", "it'll probably use the same compiler that was used when", "if(!(PyObject*)return_val && !exception_occurred)\\n\" \\ \" {\\n \\n\" \\ \" return_val", "List of library names (not filenames or paths) to link", "\\n\" \\ \" exception_occurred = 1; \\n\" \\ \"} \\n\"", "the local dictionary of the calling function is used. 
global_dict", "function wasn't found raise ValueError('function with correct signature not found')", "module_dir, compiler='', verbose=1, support_code=None, headers=[], customize=None, type_converters=None, auto_downcast=1, **kw): #", "module_dir = global_dict.get('__file__',None) func = compile_function(code,arg_names,local_dict, global_dict,module_dir, compiler=compiler, verbose=verbose, support_code=support_code,", "listed below. Also, the keyword arguments for distutils extension modules", "+ module_name) func = eval(module_name+'.compiled_func') finally: del sys.path[0] return func", "# compile code in correct location, with the given compiler", "for header in headers: mod.customize.add_header(header) # it's nice to let", "variable names that should be transferred from Python into the", "that the extension depends on. language : string Extension language", "the default, specify them here. Look in the type conversions", "function. This relies on the auto_downcast and # type factories", "of command-line arguments, but for other platforms it could be", "below. Also, the keyword arguments for distutils extension modules are", "linking object files together to create the extension (or to", "absolute_import, print_function import sys import os from . import ext_tools", "code. customize : base_info.custom_info, optional An alternative way to specify", "where 'value' is either the string to define it to", "import common_info from numpy.core.multiarray import _get_ndarray_c_version ndarray_api_version = '/* NDARRAY", "command line). undef_macros : [string] List of macros to undefine", "function is used. force : {0, 1}, optional If 1,", "user_path_list to come in here. # the PYTHONCOMPILED environment variable", "Python. Cygwin's behavior should be similar. verbose : {0,1,2}, optional", ": [str], optional A list of Python variable names that", "cache. 
function_list = function_catalog.get_functions_fast(code) for func in function_list: try: results", "Numeric arrays are of type float. If even one of", "depends : [string] List of files that the extension depends", "convert Python data types to C/C++ data types. If you'd", "is compiled every time inline is called. This is really", "Similar interpretation as for 'extra_compile_args'. export_symbols : [string] List of", "extra options to pass to SWIG if a source file", "indent(catch_code,4) + \\ return_code return all_code def python_function_definition_code(self): args =", "[str], optional A list of Python variable names that should", "module. if support_code: mod.customize.add_support_code(support_code) # add the extra headers needed", "more details. (not sure this'll be used much). type_converters :", "the code to create the local dict as a string.\"\"\"", "mod = inline_ext_module(module_name,compiler) # create the function. This relies on", "runtime_library_dirs : [string] List of directories to search for C/C++", "code is compiled every time inline is called. This is", "is used. global_dict : dict, optional If specified, it is", "pasted at the end of a ``#include`` statement in the", "that should be used as the global scope for the", "``[\"<vector>\",\"'my_header'\"]``. Note that the header strings need to be in", "except TypeError as msg: # should specify argument types here.", "functions, classes, or structures. headers : [str], optional A list", "of symbols to be exported from a shared extension. Not", "return results except TypeError as msg: # should specify argument", "modules are accepted to specify extra information needed for compiling.", "distutils. (I should add 'gcc' though to this). On windows,", "verbose has no effect if the compilation isn't necessary. 
support_code", "compiler = build_tools.choose_compiler(compiler) ext_func.set_compiler(compiler) return ext_func.function_code() def compile_function(code,arg_names,local_dict,global_dict, module_dir, compiler='',", "functions def function_declaration_code(self): code = 'static PyObject* %s(PyObject*self, PyObject* args)\\n{\\n'", "function is used. global_dict : dict, optional If specified, it", "code. If `global_dict` is not specified, the global dictionary of", "global_dict : dict, optional If specified, it is a dictionary", "needed by the function to the module. for header in", "extension_name. swig_opts : [string] Any extra options to pass to", "def arg_declaration_code(self): \"\"\"Return the declaration code as a string.\"\"\" arg_strings", "' = NULL;\\n\\n' else: init_values = '' parse_tuple = 'if(!PyArg_ParseTuple(args,\"OO:compiled_func\",'\\", "every time inline is called. This is really only useful", "in self.arg_specs] return \"\".join(arg_strings) def function_code(self): from .ext_tools import indent", "of # checking the beginning of the message, but I", "for distutils extension modules are accepted to specify extra information", "An alternative way to specify `support_code`, `headers`, etc. needed by", "has the .i extension. depends : [string] List of files", "[arg.declaration_code(inline=1) for arg in self.arg_specs] return \"\".join(arg_strings) def arg_cleanup_code(self): \"\"\"Return", "3. build the function except ValueError: # compile the library", "double complex, all variables maintain their standard types. newarr_converter :", "at link time. libraries : [string] List of library names", "\\ indent(catch_code,4) + \\ return_code return all_code def python_function_definition_code(self): args", "return NULL;\\n' return declare_return + declare_py_objects + \\ init_values +", "try local cache try: results = apply(function_cache[code],(local_dict,global_dict)) return results except", "files to link with (e.g. 
object files not implied by", "self.function_declaration_code() + \\ indent(self.parse_tuple_code(),4) + \\ try_code + \\ indent(catch_code,4)", "be detected from the source extensions if not provided. See", "+ \\ try_code + \\ indent(catch_code,4) + \\ return_code return", "the values understood by distutils. (I should add 'gcc' though", "def function_code(self): from .ext_tools import indent decl_code = indent(self.arg_declaration_code(),4) cleanup_code", "gets compiled, as the # slowdown is very noticeable. if", "*/ \\n' \\ + function_code + \\ ' /*I would", "List of extra files to link with (e.g. object files", "local_dict = call_frame.f_locals if global_dict is None: global_dict = call_frame.f_globals", "used when compiling Python. Cygwin's behavior should be similar. verbose", ": [string] Any extra platform- and compiler-specific information to use", "string Extension language (i.e. \"c\", \"c++\", \"objc\"). Will be detected", "call_frame.f_locals if global_dict is None: global_dict = call_frame.f_globals if force:", "python path. try: sys.path.insert(0,storage_dir) exec('import ' + module_name) func =", "NameError(msg) except KeyError: pass # 2. try function catalog try:", "[arg.local_dict_code() for arg in self.arg_specs] return \"\".join(arg_strings) def function_code(self): from", "cleanup_code = indent(self.arg_cleanup_code(),4) function_code = indent(self.code_block,4) # local_dict_code = indent(self.arg_local_dict_code(),4)", "declaring extra code that might be needed by your compiled", "not implied by 'sources', static libraries that must be explicitly", "module_path = function_catalog.unique_module_name(code, module_dir) storage_dir, module_name = os.path.split(module_path) mod =", "as well as all the compiler names understood by distutils.", "return_val;\\n' \\ 'int exception_occurred = 0;\\n' \\ 'PyObject *py__locals =", "directory where it lives is in the python path. try:", "the fly. 
Variables in the local and global Python scope", "contain the function. # storage_dir = catalog.intermediate_dir() code = ndarray_api_version", "verbose=1, support_code=None, headers=[], customize=None, type_converters=None, auto_downcast=1, **kw): # figure out", "(slash-separated) for portability. Source files may be C, C++, SWIG", "files not implied by 'sources', static libraries that must be", "a return statement. Instead it should assign results that need", "fill in changed locals and globals here...*/ \\n' \\ '", "the global dictionary of the calling function is used. force", "[arg.cleanup_code() for arg in self.arg_specs] return \"\".join(arg_strings) def arg_local_dict_code(self): \"\"\"Return", "the \"build_ext\" command as source for a Python extension. ..", "extra_compile_args : [string] Any extra platform- and compiler-specific information to", "extension (or to create a new static Python interpreter). Similar", "extensions, this is when the extension is loaded). extra_objects :", "compiling. Parameters ---------- code : string A string of valid", "to define it without a particular value (equivalent of \"#define", "the global scope for the C/C++ code. If `global_dict` is", "extension. depends : [string] List of files that the extension", "hope. function_catalog = catalog.catalog() class inline_ext_function(ext_tools.ext_function): # Some specialization is", "= indent(self.code_block,4) # local_dict_code = indent(self.arg_local_dict_code(),4) try_code = \\ '", "Unix form (slash-separated) for portability. Source files may be C,", "by distutils. (I should add 'gcc' though to this). On", "This should really have its own error type, instead of", "makes sense, this is typically a list of command-line arguments,", "for a Python extension. .. note:: The `module_path` file is", "shared extension. Not used on all platforms, and not generally", "add the extra headers needed by the function to the", "C/C++ code through a special argument called return_val. 
Also, the", "in self.arg_specs] return \"\".join(arg_strings) def arg_cleanup_code(self): \"\"\"Return the cleanup code", "scope are also available in the C/C++ code. Values are", "results = apply(function_cache[code],(local_dict,global_dict)) return results except TypeError as msg: msg", "= self.function_declaration_code() + \\ indent(self.parse_tuple_code(),4) + \\ try_code + \\", "return all_code def python_function_definition_code(self): args = (self.name, self.name) function_decls =", "very noticeable. if verbose > 0: print('<weave: compiling>') # compile", "= indent(self.arg_declaration_code(),4) cleanup_code = indent(self.arg_cleanup_code(),4) function_code = indent(self.code_block,4) # local_dict_code", "ndarray_api_version + '\\n' + code module_path = function_catalog.unique_module_name(code, module_dir) storage_dir,", "keyword arguments for distutils extension modules are accepted to specify", "optional Unused. Other Parameters ---------------- Relevant :mod:`distutils` keywords. These are", "else: raise TypeError(msg) except NameError as msg: msg = str(msg).strip()", "Unix form for portability). define_macros : [(name : string, value", "for PyArg_ParseTuple. Variable declarations for all PyObjects are done also.", "using a 2-tuple, where 'value' is either the string to", "= call_frame.f_globals ext_func = inline_ext_function('compiled_func',code,arg_names, local_dict,global_dict,auto_downcast, type_converters=type_converters) from . import", "customize (a custom_info object), then set the module customization. if", "'.join(self.arg_specs.py_pointers()) if py_objects: declare_py_objects = 'PyObject ' + py_objects +", "local_dict is not specified the local dictionary of the calling", "module_dir = global_dict.get('__file__',None) function_list = function_catalog.get_functions(code,module_dir) for func in function_list:", "the C/C++ code through a special argument called return_val. Also,", "catch_code = \"catch(...) 
\\n\" \\ \"{ \\n\" + \\ \"", "PyObject* args)\\n{\\n' return code % self.name def parse_tuple_code(self): \"\"\" Create", "arg in self.arg_specs] return \"\".join(arg_strings) def arg_local_dict_code(self): \"\"\"Return the code", "customization. if customize: mod.customize = customize # add the extra", "instead of # checking the beginning of the message, but", "all the compiler names understood by distutils. On Unix, it'll", "msg[:16] == \"Conversion Error\": pass else: raise NameError(msg) except KeyError:", "interpretation as for 'extra_compile_args'. export_symbols : [string] List of symbols", "should re-write compiled functions to take a local and global", "arrays has type double or double complex, all variables maintain", "type_converters=type_converters) from . import build_tools compiler = build_tools.choose_compiler(compiler) ext_func.set_compiler(compiler) return", "[string] List of library names (not filenames or paths) to", "by the function. See :mod:`scipy.weave.base_info` for more details. (not sure", "add the extra \"support code\" needed by the function to", "module_dir) storage_dir, module_name = os.path.split(module_path) mod = inline_ext_module(module_name,compiler) # create", "verbose > 0: print('<weave: compiling>') # compile code in correct", "results = attempt_function_call(code,local_dict,global_dict) # 3. build the function except ValueError:", "in self.arg_specs] return \"\".join(arg_strings) def arg_local_dict_code(self): \"\"\"Return the code to", "whatever else is recognized by the \"build_ext\" command as source", "newarr_converter : int, optional Unused. Other Parameters ---------------- Relevant :mod:`distutils`", "how long it took. 
2 prints out the command lines", "function_catalog call_frame = sys._getframe().f_back if local_dict is None: local_dict =", "double or double complex, all variables maintain their standard types.", "code */ \\n' \\ + function_code + \\ ' /*I", "define; each macro is defined using a 2-tuple, where 'value'", "set of type conversions than the default, specify them here.", "types here. pass # if we get here, the function", "local dictionary of the calling function is used. global_dict :", "strings specifying header files to use when compiling the code.", "extension modules are accepted to specify extra information needed for", "particular value (equivalent of \"#define FOO\" in source or -DFOO", "decl_code + \\ ' /* inline code */ \\n' \\", "# local_dict_code = indent(self.arg_local_dict_code(),4) try_code = \\ ' try \\n'", "auto_downcast=auto_downcast, **kw) function_catalog.add_function(code,func,module_dir) results = attempt_function_call(code,local_dict,global_dict) return results def attempt_function_call(code,local_dict,global_dict):", "import the module and return the function. Make sure #", "optional A list of strings specifying header files to use", "self.name def template_declaration_code(self): code = 'template<class T>\\n' \\ 'static PyObject*", "come in here. # the PYTHONCOMPILED environment variable offers the", "how much information is printed during the compile phase of", "it could be anything. extra_link_args : [string] Any extra platform-", "from the *previous* call # frame -- that is the", "when anything gets compiled, as the # slowdown is very", "here. Look in the type conversions section of the main" ]
[ "\"\"\"Revert a 'system' configuration change. \"\"\" self._remove_override(self.SYSTEM_POST_USER_GROUP, change_id) self._remove_override(self.SYSTEM_PRE_USER_GROUP, change_id)", "ConfigurationOverrideStrategy \"\"\" self._base_config_path = base_config_path self._owner = owner self._group =", "self._collect_revision_files(group_name) for path in removed: operating_system.remove(path, force=True, as_root=self._requires_root) def get(self,", "group string :param codec Codec for reading/writing of the particular", "OneFileOverrideStrategy if None. This strategy should be compatible with very", "a given configuration override. Remove the whole group if 'change_id'", "the override within the group. :type change_id string \"\"\" def", "if the base file is always the most current version", ":type group string :param codec Codec for reading/writing of the", "file as a Python dict. \"\"\" base_options = operating_system.read_file( self._base_config_path,", "Update the existing file. current = operating_system.read_file( revision_file, codec=self._codec, as_root=self._requires_root)", ":param change_id The name of the override within the group.", "configuration files. It is responsible for validating user inputs and", "configuration. Its base functionality includes reading and writing configuration files.", "self._import_strategy = ImportOverrideStrategy(revision_dir, self.REVISION_EXT) def configure(self, base_config_path, owner, group, codec,", "self._override_strategy.remove(self.SYSTEM_POST_USER_GROUP) operating_system.write_file( self._base_config_path, options, as_root=self._requires_root) operating_system.chown( self._base_config_path, self._owner, self._group, as_root=self._requires_root)", "in compliance with the License. You may obtain # a", "by the ConfigurationManager. \"\"\" @abc.abstractmethod def exists(self, group_name, change_id): \"\"\"Return", "The name of the override within the group. 
:type change_id", "revision file is no longer needed if there are no", "of configuration. :returns: Updates to the base revision as a", "to used to order user/system sets, 'n' is an index", "order on the base revision. Write the results to the", "all existing overrides (both system and user). :param contents Contents", "files. It is responsible for validating user inputs and requests.", "order in which the # groups get applied. System groups", "unless specified otherwise (i.e. SYSTEM_POST_USER_GROUP # will be used). SYSTEM_PRE_USER_GROUP", "configuration. The 'system' values will be re-applied over this override.", "revision_file = self._find_revision_file(group_name, change_id) if revision_file: removed.add(revision_file) else: # Remove", "over the # user group, unless specified otherwise (i.e. SYSTEM_POST_USER_GROUP", "group_name, change_id): return self._find_revision_file(group_name, change_id) is not None def apply(self,", "self.refresh_cache() def has_system_override(self, change_id): \"\"\"Return whether a given 'system' change", "self._collect_revision_files(group_name) if current_files: name_pattern = self._build_rev_name_pattern(group_name=group_name) last_file_name = os.path.basename(current_files[-1]) last_index_match", "\"\"\" self._base_config_path = base_config_path self._owner = owner self._group = group", "and requests. When supplied an override strategy it allows the", "in which the # groups get applied. System groups are", "isinstance(options, dict): # Serialize a dict of options for writing.", "Reserved. 
# # Licensed under the Apache License, Version 2.0", "as_root=self._requires_root) changes = self._import_strategy.parse_updates() updated_revision = guestagent_utils.update_dict(changes, base_revision) operating_system.write_file( self._base_config_path,", "self._revision_ext) class OneFileOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"This is a strategy for datastores that", "return 0 def _collect_revision_files(self, group_name='.+'): \"\"\"Collect and return a sorted", "six from trove.guestagent.common import guestagent_utils from trove.guestagent.common import operating_system from", "user-defined setting. :param options Configuration changes. :type options string or", "management of datastore configuration. Its base functionality includes reading and", "file. :type base_config_path string :param owner Owner of the configuration", "two camps; pre-user # and post-user. In general system overrides", "a given revision exists. \"\"\" @abc.abstractmethod def apply(self, group_name, change_id,", "change_id, options) def get_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Get the user overrides\"\"\" return", "the most current file in a given group. \"\"\" current_files", "self._build_rev_name_pattern(group_name, change_id) found = operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root) return", "Updates to the base revision as a Python dict. \"\"\"", "for path in removed: operating_system.remove(path, force=True, as_root=self._requires_root) def get(self, group_name,", "imported into the base configuration file which never changes itself.", "datastore defines its strategy explicitly to avoid upgrade compatibility issues", "override any user-defined setting. :param options Configuration changes. 
:type options", "operating_system.read_file( self._base_revision_file, codec=self._codec, as_root=self._requires_root) changes = self._import_strategy.parse_updates() updated_revision = guestagent_utils.update_dict(changes,", "options dict \"\"\" @abc.abstractmethod def remove(self, group_name, change_id=None): \"\"\"Rollback a", "# Create a new file. last_revision_index = self._get_last_file_index(group_name) revision_file =", "return a sorted list of paths to existing revision files.", "Owner of the configuration files. :type owner string :param group", "OneFileOverrideStrategy(revision_dir) else: self._override_strategy = override_strategy self._override_strategy.configure( base_config_path, owner, group, codec,", "configuration change. \"\"\" self._remove_override(self.SYSTEM_POST_USER_GROUP, change_id) self._remove_override(self.SYSTEM_PRE_USER_GROUP, change_id) def remove_user_override(self, change_id=DEFAULT_CHANGE_ID):", "group_name string :param change_id The name of the override within", "change_id, options) def apply_user_override(self, options, change_id=DEFAULT_CHANGE_ID): \"\"\"Apply a 'user' change", "sub-directory at the location of the configuration file. revision_dir =", "if 'change_id' is None. :param group_name The group the override", "owner Owner of the configuration and revision files. :type owner", "format of override files is: '<set prefix>-<n>-<group name>.<ext>' where 'set", "def refresh_cache(self): self._value_cache = self.parse_configuration() @six.add_metaclass(abc.ABCMeta) class ConfigurationOverrideStrategy(object): \"\"\"ConfigurationOverrideStrategy handles", "def apply_user_override(self, options, change_id=DEFAULT_CHANGE_ID): \"\"\"Apply a 'user' change to the", "which the # groups get applied. 
System groups are divided", "codec=self._codec, as_root=self._requires_root) def parse_updates(self): parsed_options = {} for path in", "updates applied to the base revision as a single dict.", "of the configuration file (applying overrides if any) and parse", "= OneFileOverrideStrategy(revision_dir) else: self._override_strategy = override_strategy self._override_strategy.configure( base_config_path, owner, group,", "to in writing, software # distributed under the License is", "\"\"\"Revert a 'user' configuration change. \"\"\" self._remove_override(self.USER_GROUP, change_id) def _remove_override(self,", "or agreed to in writing, software # distributed under the", "group. :type change_id string :param options Configuration changes. :type options", "# groups get applied. System groups are divided into two", "\"\"\"Return True if there currently are any revision files. \"\"\"", "last_file_name) if last_index_match: return int(last_index_match.group(1)) return 0 def _collect_revision_files(self, group_name='.+'):", "is None: self.refresh_cache() return self._value_cache.get(key, default) def parse_configuration(self): \"\"\"Read contents", "same id already exists. :param group_name The group the override", "should be sorted in the same order in which they", "\"\"\" :param base_config_path Path to the configuration file. :type base_config_path", "change_id The name of the override within the group. :type", "as_root=self._requires_root) and (len(self._collect_revision_files()) > 0)) def _get_last_file_index(self, group_name): \"\"\"Get the", "in a given group. \"\"\" current_files = self._collect_revision_files(group_name) if current_files:", "can be used. It would typically be configured by the", "the configuration file. 
:type base_config_path string :param owner Owner of", "name_pattern = self._build_rev_name_pattern(group_name=group_name) last_file_name = os.path.basename(current_files[-1]) last_index_match = re.match(name_pattern, last_file_name)", "or dict \"\"\" if isinstance(options, dict): # Serialize a dict", "a given 'system' change exists. \"\"\" return (self._override_strategy.exists(self.SYSTEM_POST_USER_GROUP, change_id) or", "Apache License, Version 2.0 (the \"License\"); you may # not", "string or dict \"\"\" self._apply_override(self.USER_GROUP, change_id, options) def get_user_override(self, change_id=DEFAULT_CHANGE_ID):", "All Rights Reserved. # # Licensed under the Apache License,", "current value at a given key or 'default'. \"\"\" if", "override file is simply deleted when the override is removed.", "configuration and revision files. :type group string :param codec Codec", "% (group_name, last_revision_index + 1, change_id), self._revision_ext) else: # Update", "to the directory for import files. :type revision_dir string :param", "used). SYSTEM_PRE_USER_GROUP = '10-system' USER_GROUP = '20-user' SYSTEM_POST_USER_GROUP = '50-system'", "License, Version 2.0 (the \"License\"); you may # not use", "base_options) return base_options def save_configuration(self, options): \"\"\"Write given contents to", "= revision_dir self._import_strategy = ImportOverrideStrategy(revision_dir, self.REVISION_EXT) def configure(self, base_config_path, owner,", "given revision exists. \"\"\" @abc.abstractmethod def apply(self, group_name, change_id, options):", "override is applied or removed a new configuration file is", "the configuration. 
System overrides are always applied after all user", "self._owner, self._group, as_root=self._requires_root) operating_system.chmod( revision_file, FileMode.ADD_READ_ALL, as_root=self._requires_root) def _initialize_import_directory(self): \"\"\"Lazy-initialize", "not use this file except in compliance with the License.", "else: self._override_strategy.remove(self.USER_GROUP) self._override_strategy.remove(self.SYSTEM_PRE_USER_GROUP) self._override_strategy.remove(self.SYSTEM_POST_USER_GROUP) operating_system.write_file( self._base_config_path, options, as_root=self._requires_root) operating_system.chown( self._base_config_path,", "def get_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Get the user overrides\"\"\" return self._override_strategy.get(self.USER_GROUP, change_id)", "self._value_cache = None if not override_strategy: # Use OneFile strategy", "the revisions in a # sub-directory at the location of", "= self._build_rev_name_pattern(group_name=group_name) last_file_name = os.path.basename(current_files[-1]) last_index_match = re.match(name_pattern, last_file_name) if", "contents of a given configuration override :param group_name The group", "file in a given group. \"\"\" current_files = self._collect_revision_files(group_name) if", "_initialize_import_directory(self): \"\"\"Lazy-initialize the directory for imported revision files. \"\"\" if", "= guestagent_utils.update_dict(options, current) operating_system.write_file( revision_file, options, codec=self._codec, as_root=self._requires_root) operating_system.chown( revision_file,", "override within the group. :type change_id string \"\"\" @abc.abstractmethod def", "list of paths to existing revision files. 
The files should", "not isinstance(options, dict): # Deserialize the options into a dict", "the order in which overrides within their set got applied.", "configuration changes and apply them in order on the base", "removed = self._collect_revision_files(group_name) for path in removed: operating_system.remove(path, force=True, as_root=self._requires_root)", "configuration override :param group_name The group the override belongs to.", "which they were applied. \"\"\" name_pattern = self._build_rev_name_pattern(group_name=group_name) return sorted(operating_system.list_files_in_directory(", "most current file in a given group. \"\"\" current_files =", ":param requires_root Whether the strategy requires superuser privileges. :type requires_root", "the same id already exists. :param group_name The group the", "\"\"\" @abc.abstractmethod def remove(self, group_name, change_id=None): \"\"\"Rollback a given configuration", "the Trove API. - System overrides - 'internal' configuration changes", "codec self._requires_root = requires_root self._base_revision_file = guestagent_utils.build_file_path( self._revision_dir, self.BASE_REVISION_NAME, self.REVISION_EXT)", "def remove_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'user' configuration change. \"\"\" self._remove_override(self.USER_GROUP,", "It is responsible for validating user inputs and requests. When", "License is distributed on an \"AS IS\" BASIS, WITHOUT #", "self._apply_override( group_name, change_id, self._codec.deserialize(options)) else: self._override_strategy.apply(group_name, change_id, options) self.refresh_cache() def", "True if there currently are any revision files. \"\"\" return", "\"License\"); you may # not use this file except in", "self.refresh_cache() def remove_system_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'system' configuration change. \"\"\"", "are any revision files. 
\"\"\" return (operating_system.exists( self._revision_dir, is_directory=True, as_root=self._requires_root)", "class OneFileOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"This is a strategy for datastores that do", "as_root=self._requires_root) def get(self, group_name, change_id): revision_file = self._find_revision_file(group_name, change_id) return", "Codec for reading/writing of the particular configuration format. :type codec", "of override files is: '<set prefix>-<n>-<group name>.<ext>' where 'set prefix'", "_get_last_file_index(self, group_name): \"\"\"Get the index of the most current file", "return self._find_revision_file(group_name, change_id) is not None def apply(self, group_name, change_id,", "the user to manage configuration overrides as well. \"\"\" #", "is no longer needed if there are no # overrides.", "= operating_system.read_file(path, codec=self._codec, as_root=self._requires_root) guestagent_utils.update_dict(options, parsed_options) return parsed_options @property def", "requires_root boolean :param override_strategy Strategy used to manage configuration overrides", "options Configuration changes. :type options string or dict \"\"\" self._apply_override(self.USER_GROUP,", "group, codec, requires_root) def get_value(self, key, default=None): \"\"\"Return the current", "return next(iter(found), None) def _build_rev_name_pattern(self, group_name='.+', change_id='.+'): return self.FILE_NAME_PATTERN %", "the configuration files. :type group string :param codec Codec for", "= requires_root def exists(self, group_name, change_id): return self._find_revision_file(group_name, change_id) is", "default=None): \"\"\"Return the current value at a given key or", "a dict if not already. 
self._apply_override( group_name, change_id, self._codec.deserialize(options)) else:", "'20-user' SYSTEM_POST_USER_GROUP = '50-system' DEFAULT_STRATEGY_OVERRIDES_SUB_DIR = 'overrides' DEFAULT_CHANGE_ID = 'common'", "When an override is applied or removed a new configuration", "same order in which they were applied. \"\"\" name_pattern =", "= self._find_revision_file(group_name, change_id) return operating_system.read_file(revision_file, codec=self._codec, as_root=self._requires_root) def parse_updates(self): parsed_options", "overrides (e.g. ImportOverrideStrategy). Defaults to OneFileOverrideStrategy if None. This strategy", "got applied. \"\"\" FILE_NAME_PATTERN = r'%s-([0-9]+)-%s\\.%s$' def __init__(self, revision_dir, revision_ext):", "self._group = group self._codec = codec self._requires_root = requires_root self._value_cache", "the strategy requires superuser privileges. :type requires_root boolean \"\"\" self._base_config_path", "new configuration file is generated by applying all changes on", "a Python dict. \"\"\" return {} class ImportOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"Import strategy", "contents string or dict \"\"\" if isinstance(options, dict): # Serialize", "otherwise (i.e. SYSTEM_POST_USER_GROUP # will be used). SYSTEM_PRE_USER_GROUP = '10-system'", "imported revision files. \"\"\" if not os.path.exists(self._revision_dir): operating_system.create_directory( self._revision_dir, user=self._owner,", "apply(self, group_name, change_id, options): \"\"\"Apply given options on the most", ":param revision_ext Extension of revision files. :type revision_ext string \"\"\"", "def __init__(self, revision_dir): \"\"\" :param revision_dir Path to the directory", "configuration revision. 
Update if a file with the same id", "self._base_config_path, self._owner, self._group, as_root=self._requires_root) operating_system.chmod( self._base_config_path, FileMode.ADD_READ_ALL, as_root=self._requires_root) self.refresh_cache() def", "pattern=name_pattern, as_root=self._requires_root)) def _find_revision_file(self, group_name, change_id): name_pattern = self._build_rev_name_pattern(group_name, change_id)", "dict \"\"\" @abc.abstractmethod def remove(self, group_name, change_id=None): \"\"\"Rollback a given", "applying all changes on a saved-off base revision. \"\"\" BASE_REVISION_NAME", "the override is removed. We keep two sets of override", "Extension of revision files. :type revision_ext string \"\"\" self._revision_dir =", "@abc.abstractmethod def apply(self, group_name, change_id, options): \"\"\"Apply given options on", "found = operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root) return next(iter(found), None)", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "Owner of the configuration and revision files. :type owner string", "return self._import_strategy.get(group_name, change_id) def _regenerate_base_configuration(self): \"\"\"Gather all configuration changes and", "'system' change to the configuration. System overrides are always applied", "self._override_strategy.parse_updates() guestagent_utils.update_dict(updates, base_options) return base_options def save_configuration(self, options): \"\"\"Write given", "revision_dir): \"\"\" :param revision_dir Path to the directory for import", "used. It would typically be configured by the ConfigurationManager. 
\"\"\"", "It will be regenerated based on the current # configuration", "options) self.refresh_cache() def remove_system_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'system' configuration change.", "change_id): return self._import_strategy.get(group_name, change_id) def _regenerate_base_configuration(self): \"\"\"Gather all configuration changes", "\"\"\" return (operating_system.exists( self._revision_dir, is_directory=True, as_root=self._requires_root) and (len(self._collect_revision_files()) > 0))", "self.SYSTEM_POST_USER_GROUP) self._apply_override(group_name, change_id, options) def apply_user_override(self, options, change_id=DEFAULT_CHANGE_ID): \"\"\"Apply a", "re-applied over this override. :param options Configuration changes. :type options", "change exists. \"\"\" return (self._override_strategy.exists(self.SYSTEM_POST_USER_GROUP, change_id) or self._override_strategy.exists(self.SYSTEM_PRE_USER_GROUP, change_id)) def", "self._revision_dir = revision_dir self._revision_ext = revision_ext def configure(self, base_config_path, owner,", "all configuration changes and apply them in order on the", "base revision file is no longer needed if there are", "group_name, change_id): \"\"\"Return whether a given revision exists. \"\"\" @abc.abstractmethod", "Version 2.0 (the \"License\"); you may # not use this", "it allows the user to manage configuration overrides as well.", "to manage configuration overrides (e.g. ImportOverrideStrategy). Defaults to OneFileOverrideStrategy if", "as_root=self._requires_root) operating_system.chown( self._base_config_path, self._owner, self._group, as_root=self._requires_root) operating_system.chmod( self._base_config_path, FileMode.ADD_READ_ALL, as_root=self._requires_root)", "operating_system.read_file( self._base_config_path, codec=self._codec, as_root=self._requires_root) updates = self._override_strategy.parse_updates() guestagent_utils.update_dict(updates, base_options) return", "# Copyright 2015 Tesora Inc. 
# All Rights Reserved. #", "this strategy. A strategy needs to be configured before it", "self._requires_root = requires_root def exists(self, group_name, change_id): return self._find_revision_file(group_name, change_id)", "= revision_ext def configure(self, base_config_path, owner, group, codec, requires_root): \"\"\"", "set got applied. \"\"\" FILE_NAME_PATTERN = r'%s-([0-9]+)-%s\\.%s$' def __init__(self, revision_dir,", "and remove configuration overrides. \"\"\" @abc.abstractmethod def configure(self, *args, **kwargs):", "exists. \"\"\" @abc.abstractmethod def apply(self, group_name, change_id, options): \"\"\"Apply given", "- 'internal' configuration changes applied by the guestagent. The name", "already exists. :param group_name The group the override belongs to.", "as_root=self._requires_root) operating_system.chown( revision_file, self._owner, self._group, as_root=self._requires_root) operating_system.chmod( revision_file, FileMode.ADD_READ_ALL, as_root=self._requires_root)", "group_name, change_id, self._codec.deserialize(options)) else: self._override_strategy.apply(group_name, change_id, options) self.refresh_cache() def remove_system_override(self,", "User overrides - configuration overrides applied by the user via", "\"\"\" if not os.path.exists(self._base_revision_file): # Initialize the file with the", "def has_revisions(self): \"\"\"Return True if there currently are any revision", "revision_ext): \"\"\" :param revision_dir Path to the directory for import", "file on the first 'apply()'. operating_system.remove(self._base_revision_file, force=True, as_root=self._requires_root) def get(self,", "\"\"\"Get the index of the most current file in a", "group. :type change_id string \"\"\" def parse_updates(self): \"\"\"Return all updates", "changes. 
:type options string or dict \"\"\" group_name = (", "change_id, self._codec.deserialize(options)) else: self._override_strategy.apply(group_name, change_id, options) self.refresh_cache() def remove_system_override(self, change_id=DEFAULT_CHANGE_ID):", "DEFAULT_CHANGE_ID = 'common' def __init__(self, base_config_path, owner, group, codec, requires_root=False,", "The files should be sorted in the same order in", "within the group. :type change_id string \"\"\" def parse_updates(self): \"\"\"Return", "compliance with the License. You may obtain # a copy", "def get_value(self, key, default=None): \"\"\"Return the current value at a", "guestagent_utils.update_dict(options, current) operating_system.write_file( revision_file, options, codec=self._codec, as_root=self._requires_root) operating_system.chown( revision_file, self._owner,", "def has_system_override(self, change_id): \"\"\"Return whether a given 'system' change exists.", "where 'set prefix' is to used to order user/system sets,", "def save_configuration(self, options): \"\"\"Write given contents to the base configuration", "two sets of override files in a separate directory. -", "def _build_rev_name_pattern(self, group_name='.+', change_id='.+'): return self.FILE_NAME_PATTERN % (group_name, change_id, self._revision_ext)", "if not override_strategy: # Use OneFile strategy by default. Store", "current version of configuration. :returns: Updates to the base revision", "configuration file. 
:type contents string or dict \"\"\" if isinstance(options,", "change_id string \"\"\" @abc.abstractmethod def get(self, group_name, change_id=None): \"\"\"Return the", "# # Unless required by applicable law or agreed to", "{} for path in self._collect_revision_files(): options = operating_system.read_file(path, codec=self._codec, as_root=self._requires_root)", "group, codec, requires_root): \"\"\" :param base_config_path Path to the configuration", "# The base revision file is no longer needed if", "os.path.exists(self._base_revision_file): # Initialize the file with the current configuration contents", "self.refresh_cache() def refresh_cache(self): self._value_cache = self.parse_configuration() @six.add_metaclass(abc.ABCMeta) class ConfigurationOverrideStrategy(object): \"\"\"ConfigurationOverrideStrategy", "= {} for path in self._collect_revision_files(): options = operating_system.read_file(path, codec=self._codec,", "the configuration file (applying overrides if any) and parse it", "\"\"\"Lazy-initialize the directory for imported revision files. \"\"\" if not", "last_revision_index + 1, change_id), self._revision_ext) else: # Update the existing", "guestagent_utils.update_dict(options, parsed_options) return parsed_options @property def has_revisions(self): \"\"\"Return True if", "guestagent_utils.build_file_path( self._revision_dir, self.BASE_REVISION_NAME, self.REVISION_EXT) self._import_strategy.configure( base_config_path, owner, group, codec, requires_root)", "change_id): revision_file = self._find_revision_file(group_name, change_id) return operating_system.read_file(revision_file, codec=self._codec, as_root=self._requires_root) def", "change_id=DEFAULT_CHANGE_ID): \"\"\"Get the user overrides\"\"\" return self._override_strategy.get(self.USER_GROUP, change_id) def _apply_override(self,", "Its base functionality includes reading and writing configuration files. 
It", "change_id) if revision_file is None: # Create a new file.", "avoid upgrade compatibility issues in case the default implementation changes", "The 'system' values will be re-applied over this override. :param", "None) def _build_rev_name_pattern(self, group_name='.+', change_id='.+'): return self.FILE_NAME_PATTERN % (group_name, change_id,", "recommended each datastore defines its strategy explicitly to avoid upgrade", "the Import Strategy to keep the overrides internally. When an", "overrides internally. When an override is applied or removed a", "were applied. \"\"\" name_pattern = self._build_rev_name_pattern(group_name=group_name) return sorted(operating_system.list_files_in_directory( self._revision_dir, recursive=True,", "Serialize a dict of options for writing. self.save_configuration(self._codec.serialize(options)) else: self._override_strategy.remove(self.USER_GROUP)", "has_revisions(self): \"\"\"Return True if there currently are any revision files.", "def get(self, group_name, change_id): revision_file = self._find_revision_file(group_name, change_id) return operating_system.read_file(revision_file,", "requires_root Whether the manager requires superuser privileges. :type requires_root boolean", "change_id) def remove_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'user' configuration change. \"\"\"", "strategy for datastores that do not support multiple configuration files.", "None: # Create a new file. last_revision_index = self._get_last_file_index(group_name) revision_file", "def apply(self, group_name, change_id, options): \"\"\"Apply given options on the", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "override. :param options Configuration changes. 
:type options string or dict", "operating_system.chown( revision_file, self._owner, self._group, as_root=self._requires_root) operating_system.chmod( revision_file, FileMode.ADD_READ_ALL, as_root=self._requires_root) def", "that do not support multiple configuration files. It uses the", "revision. Write the results to the configuration file. \"\"\" if", "License. import abc import os import re import six from", "configuration files. The strategy provides functionality to enumerate, apply and", "be used). SYSTEM_PRE_USER_GROUP = '10-system' USER_GROUP = '20-user' SYSTEM_POST_USER_GROUP =", "files. :type revision_dir string :param revision_ext Extension of revision files.", "os.path.basename(current_files[-1]) last_index_match = re.match(name_pattern, last_file_name) if last_index_match: return int(last_index_match.group(1)) return", "an empty dict if the base file is always the", "if any) and parse it into a dict. :returns: Configuration", "change_id) def _regenerate_base_configuration(self): \"\"\"Gather all configuration changes and apply them", "string \"\"\" self._revision_dir = revision_dir self._import_strategy = ImportOverrideStrategy(revision_dir, self.REVISION_EXT) def", ":type revision_ext string \"\"\" self._revision_dir = revision_dir self._revision_ext = revision_ext", "_build_rev_name_pattern(self, group_name='.+', change_id='.+'): return self.FILE_NAME_PATTERN % (group_name, change_id, self._revision_ext) class", "may obtain # a copy of the License at #", "else: # Remove the entire group. removed = self._collect_revision_files(group_name) for", "validating user inputs and requests. When supplied an override strategy", "override within the group. 
:type change_id string :param options Configuration", "Unless required by applicable law or agreed to in writing,", "= self._override_strategy.parse_updates() guestagent_utils.update_dict(updates, base_options) return base_options def save_configuration(self, options): \"\"\"Write", "self._override_strategy.remove(group_name, change_id) self.refresh_cache() def refresh_cache(self): self._value_cache = self.parse_configuration() @six.add_metaclass(abc.ABCMeta) class", "version of configuration. :returns: Updates to the base revision as", "change_id)) def apply_system_override(self, options, change_id=DEFAULT_CHANGE_ID, pre_user=False): \"\"\"Apply a 'system' change", "self._find_revision_file(group_name, change_id) return operating_system.read_file(revision_file, codec=self._codec, as_root=self._requires_root) def parse_updates(self): parsed_options =", "needed if there are no # overrides. It will be", "\"\"\" def parse_updates(self): \"\"\"Return all updates applied to the base", "last_index_match: return int(last_index_match.group(1)) return 0 def _collect_revision_files(self, group_name='.+'): \"\"\"Collect and", "not support multiple configuration files. It uses the Import Strategy", "current configuration revision. Update if a file with the same", "or 'default'. 
\"\"\" if self._value_cache is None: self.refresh_cache() return self._value_cache.get(key,", "# Initialize the file with the current configuration contents if", "= 'overrides' DEFAULT_CHANGE_ID = 'common' def __init__(self, base_config_path, owner, group,", "guestagent_utils from trove.guestagent.common import operating_system from trove.guestagent.common.operating_system import FileMode class", "self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root) return next(iter(found), None) def _build_rev_name_pattern(self, group_name='.+',", "= os.path.basename(current_files[-1]) last_index_match = re.match(name_pattern, last_file_name) if last_index_match: return int(last_index_match.group(1))", "= group self._codec = codec self._requires_root = requires_root def exists(self,", "to the configuration. System overrides are always applied after all", "operating_system.create_directory( self._revision_dir, user=self._owner, group=self._group, force=True, as_root=self._requires_root) def remove(self, group_name, change_id=None):", "codec, requires_root=False, override_strategy=None): \"\"\" :param base_config_path Path to the configuration", "configured by the ConfigurationManager. \"\"\" @abc.abstractmethod def exists(self, group_name, change_id):", "\"\"\" if isinstance(options, dict): # Serialize a dict of options", "in a separate directory. - User overrides - configuration overrides", "class ConfigurationOverrideStrategy(object): \"\"\"ConfigurationOverrideStrategy handles configuration files. The strategy provides functionality", "support multiple configuration files. It uses the Import Strategy to", "strategy. A strategy needs to be configured before it can", "either express or implied. 
See the # License for the", "revision_file, self._owner, self._group, as_root=self._requires_root) operating_system.chmod( revision_file, FileMode.ADD_READ_ALL, as_root=self._requires_root) def _initialize_import_directory(self):", "if revision_file: removed.add(revision_file) else: # Remove the entire group. removed", "def _get_last_file_index(self, group_name): \"\"\"Get the index of the most current", "change_id) if revision_file: removed.add(revision_file) else: # Remove the entire group.", "may # not use this file except in compliance with", "codec, requires_root) def get_value(self, key, default=None): \"\"\"Return the current value", "applied by the user via the Trove API. - System", "owner string :param group Group of the configuration files. :type", "boolean :param override_strategy Strategy used to manage configuration overrides (e.g.", "= operating_system.read_file( revision_file, codec=self._codec, as_root=self._requires_root) options = guestagent_utils.update_dict(options, current) operating_system.write_file(", "strategy by default. Store the revisions in a # sub-directory", "files. :type revision_ext string \"\"\" self._revision_dir = revision_dir self._revision_ext =", "current = operating_system.read_file( revision_file, codec=self._codec, as_root=self._requires_root) options = guestagent_utils.update_dict(options, current)", "options = operating_system.read_file(path, codec=self._codec, as_root=self._requires_root) guestagent_utils.update_dict(options, parsed_options) return parsed_options @property", "next(iter(found), None) def _build_rev_name_pattern(self, group_name='.+', change_id='.+'): return self.FILE_NAME_PATTERN % (group_name,", "USER_GROUP = '20-user' SYSTEM_POST_USER_GROUP = '50-system' DEFAULT_STRATEGY_OVERRIDES_SUB_DIR = 'overrides' DEFAULT_CHANGE_ID", "Import Strategy to keep the overrides internally. 
When an override", "@six.add_metaclass(abc.ABCMeta) class ConfigurationOverrideStrategy(object): \"\"\"ConfigurationOverrideStrategy handles configuration files. The strategy provides", "is not None def apply(self, group_name, change_id, options): self._initialize_import_directory() revision_file", "= '20-user' SYSTEM_POST_USER_GROUP = '50-system' DEFAULT_STRATEGY_OVERRIDES_SUB_DIR = 'overrides' DEFAULT_CHANGE_ID =", "the configuration files. :type owner string :param group Group of", "as_root=self._requires_root) operating_system.chmod( revision_file, FileMode.ADD_READ_ALL, as_root=self._requires_root) def _initialize_import_directory(self): \"\"\"Lazy-initialize the directory", "paths to existing revision files. The files should be sorted", "(applying overrides if any) and parse it into a dict.", "which never changes itself. An override file is simply deleted", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "= re.match(name_pattern, last_file_name) if last_index_match: return int(last_index_match.group(1)) return 0 def", "string \"\"\" self._revision_dir = revision_dir self._revision_ext = revision_ext def configure(self,", "change. \"\"\" self._remove_override(self.SYSTEM_POST_USER_GROUP, change_id) self._remove_override(self.SYSTEM_PRE_USER_GROUP, change_id) def remove_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert", "options on the most current configuration revision. Update if a", "user to manage configuration overrides as well. \"\"\" # Configuration", ":type owner string :param group Group of the configuration files.", "Create a new file. last_revision_index = self._get_last_file_index(group_name) revision_file = guestagent_utils.build_file_path(", "revision files. :type revision_ext string \"\"\" self._revision_dir = revision_dir self._revision_ext", "existing file. current = operating_system.read_file( revision_file, codec=self._codec, as_root=self._requires_root) options =", "the existing file. 
current = operating_system.read_file( revision_file, codec=self._codec, as_root=self._requires_root) options", "requires superuser privileges. :type requires_root boolean :param override_strategy Strategy used", "are always applied after all user changes so that they", "def apply(self, group_name, change_id, options): self._initialize_import_directory() revision_file = self._find_revision_file(group_name, change_id)", "group, codec, requires_root=False, override_strategy=None): \"\"\" :param base_config_path Path to the", "given group. \"\"\" current_files = self._collect_revision_files(group_name) if current_files: name_pattern =", "'system' change exists. \"\"\" return (self._override_strategy.exists(self.SYSTEM_POST_USER_GROUP, change_id) or self._override_strategy.exists(self.SYSTEM_PRE_USER_GROUP, change_id))", "change_id, options): self._initialize_import_directory() revision_file = self._find_revision_file(group_name, change_id) if revision_file is", "files. \"\"\" return (operating_system.exists( self._revision_dir, is_directory=True, as_root=self._requires_root) and (len(self._collect_revision_files()) >", "should be compatible with very much any datastore. It is", "options): self._initialize_import_directory() revision_file = self._find_revision_file(group_name, change_id) if revision_file is None:", "whole group if 'change_id' is None. :param group_name The group", "requests. When supplied an override strategy it allows the user", "specified otherwise (i.e. SYSTEM_POST_USER_GROUP # will be used). SYSTEM_PRE_USER_GROUP =", "exist. operating_system.copy( self._base_config_path, self._base_revision_file, force=True, preserve=True, as_root=self._requires_root) base_revision = operating_system.read_file(", "file is always the most current version of configuration. :returns:", "no longer needed if there are no # overrides. It", "existing overrides (both system and user). 
:param contents Contents of", "ImportOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"Import strategy keeps overrides in separate files that get", "and writing configuration files. It is responsible for validating user", "revision exists. \"\"\" @abc.abstractmethod def apply(self, group_name, change_id, options): \"\"\"Apply", "over this override. :param options Configuration changes. :type options string", "get_value(self, key, default=None): \"\"\"Return the current value at a given", "revision files. \"\"\" return (operating_system.exists( self._revision_dir, is_directory=True, as_root=self._requires_root) and (len(self._collect_revision_files())", "current # configuration file on the first 'apply()'. operating_system.remove(self._base_revision_file, force=True,", "codec Codec for reading/writing of the particular configuration format. :type", "base_config_path, owner, group, codec, requires_root): \"\"\" :param base_config_path Path to", "( self.SYSTEM_PRE_USER_GROUP if pre_user else self.SYSTEM_POST_USER_GROUP) self._apply_override(group_name, change_id, options) def", "overrides in separate files that get imported into the base", "a # sub-directory at the location of the configuration file.", "Configuration changes. :type options string or dict \"\"\" self._apply_override(self.USER_GROUP, change_id,", "\"\"\" ConfigurationManager is responsible for management of datastore configuration. Its", "the configuration file. \"\"\" if not os.path.exists(self._base_revision_file): # Initialize the", "\"\"\"Return the current value at a given key or 'default'.", "group_name, change_id): revision_file = self._find_revision_file(group_name, change_id) return operating_system.read_file(revision_file, codec=self._codec, as_root=self._requires_root)", "applied. 
\"\"\" name_pattern = self._build_rev_name_pattern(group_name=group_name) return sorted(operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern,", "def configure(self, base_config_path, owner, group, codec, requires_root): \"\"\" :param base_config_path", "The names determine the order in which the # groups", "import files. :type revision_dir string \"\"\" self._revision_dir = revision_dir self._import_strategy", "# Deserialize the options into a dict if not already.", "override :param group_name The group the override belongs to. :type", "self._apply_override(self.USER_GROUP, change_id, options) def get_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Get the user overrides\"\"\"", "the group. :type change_id string \"\"\" def parse_updates(self): \"\"\"Return all", "return self._import_strategy.exists(group_name, change_id) def apply(self, group_name, change_id, options): self._import_strategy.apply(group_name, change_id,", "file with the same id already exists. :param group_name The", "is always the most current version of configuration. :returns: Updates", "self._import_strategy.parse_updates() updated_revision = guestagent_utils.update_dict(changes, base_revision) operating_system.write_file( self._base_config_path, updated_revision, codec=self._codec, as_root=self._requires_root)", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", ":type group_name string :param change_id The name of the override", "apply them in order on the base revision. Write the", "= codec self._requires_root = requires_root self._value_cache = None if not", "revision as a single dict. Return an empty dict if", "dict. 
\"\"\" base_options = operating_system.read_file( self._base_config_path, codec=self._codec, as_root=self._requires_root) updates =", "is: '<set prefix>-<n>-<group name>.<ext>' where 'set prefix' is to used", "= '50-system' DEFAULT_STRATEGY_OVERRIDES_SUB_DIR = 'overrides' DEFAULT_CHANGE_ID = 'common' def __init__(self,", ":param owner Owner of the configuration files. :type owner string", "(len(self._collect_revision_files()) > 0)) def _get_last_file_index(self, group_name): \"\"\"Get the index of", "revision_file = guestagent_utils.build_file_path( self._revision_dir, '%s-%03d-%s' % (group_name, last_revision_index + 1,", "and limitations # under the License. import abc import os", "file. last_revision_index = self._get_last_file_index(group_name) revision_file = guestagent_utils.build_file_path( self._revision_dir, '%s-%03d-%s' %", "overrides - configuration overrides applied by the user via the", "class ImportOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"Import strategy keeps overrides in separate files that", "\"\"\" if not os.path.exists(self._revision_dir): operating_system.create_directory( self._revision_dir, user=self._owner, group=self._group, force=True, as_root=self._requires_root)", "name>.<ext>' where 'set prefix' is to used to order user/system", "changes so that they override any user-defined setting. :param options", "We keep two sets of override files in a separate", "group names. 
The names determine the order in which the", "revision_file, options, codec=self._codec, as_root=self._requires_root) operating_system.chown( revision_file, self._owner, self._group, as_root=self._requires_root) operating_system.chmod(", "self._override_strategy.get(self.USER_GROUP, change_id) def _apply_override(self, group_name, change_id, options): if not isinstance(options,", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", ":type base_config_path string :param owner Owner of the configuration files.", "into two camps; pre-user # and post-user. In general system", "**kwargs): \"\"\"Configure this strategy. A strategy needs to be configured", "the directory for import files. :type revision_dir string :param revision_ext", "the group. :type change_id string :param options Configuration changes. :type", "import os import re import six from trove.guestagent.common import guestagent_utils", "in order on the base revision. Write the results to", "files should be sorted in the same order in which", "boolean \"\"\" self._base_config_path = base_config_path self._owner = owner self._group =", "self._import_strategy.has_revisions: self._import_strategy.remove(group_name, change_id=change_id) self._regenerate_base_configuration() if not self._import_strategy.has_revisions: # The base", "options for writing. self.save_configuration(self._codec.serialize(options)) else: self._override_strategy.remove(self.USER_GROUP) self._override_strategy.remove(self.SYSTEM_PRE_USER_GROUP) self._override_strategy.remove(self.SYSTEM_POST_USER_GROUP) operating_system.write_file( self._base_config_path,", "for path in self._collect_revision_files(): options = operating_system.read_file(path, codec=self._codec, as_root=self._requires_root) guestagent_utils.update_dict(options,", "functionality to enumerate, apply and remove configuration overrides. \"\"\" @abc.abstractmethod", "Group of the configuration and revision files. 
:type group string", "as_root=self._requires_root) updates = self._override_strategy.parse_updates() guestagent_utils.update_dict(updates, base_options) return base_options def save_configuration(self,", "order in which overrides within their set got applied. \"\"\"", "Configuration group names. The names determine the order in which", "codec, requires_root): \"\"\" :param base_config_path Path to the configuration file.", "allows the user to manage configuration overrides as well. \"\"\"", "change_id) def _apply_override(self, group_name, change_id, options): if not isinstance(options, dict):", "isinstance(options, dict): # Deserialize the options into a dict if", "group. :type change_id string \"\"\" @abc.abstractmethod def get(self, group_name, change_id=None):", "current_files: name_pattern = self._build_rev_name_pattern(group_name=group_name) last_file_name = os.path.basename(current_files[-1]) last_index_match = re.match(name_pattern,", "writing. self.save_configuration(self._codec.serialize(options)) else: self._override_strategy.remove(self.USER_GROUP) self._override_strategy.remove(self.SYSTEM_PRE_USER_GROUP) self._override_strategy.remove(self.SYSTEM_POST_USER_GROUP) operating_system.write_file( self._base_config_path, options, as_root=self._requires_root)", "any datastore. It is recommended each datastore defines its strategy", "change_id) def _remove_override(self, group_name, change_id): self._override_strategy.remove(group_name, change_id) self.refresh_cache() def refresh_cache(self):", "from trove.guestagent.common import operating_system from trove.guestagent.common.operating_system import FileMode class ConfigurationManager(object):", "not override_strategy: # Use OneFile strategy by default. Store the", "as_root=self._requires_root) def remove(self, group_name, change_id=None): removed = set() if change_id:", "for writing. 
self.save_configuration(self._codec.serialize(options)) else: self._override_strategy.remove(self.USER_GROUP) self._override_strategy.remove(self.SYSTEM_PRE_USER_GROUP) self._override_strategy.remove(self.SYSTEM_POST_USER_GROUP) operating_system.write_file( self._base_config_path, options,", "def get(self, group_name, change_id): return self._import_strategy.get(group_name, change_id) def _regenerate_base_configuration(self): \"\"\"Gather", "responsible for management of datastore configuration. Its base functionality includes", "not self._import_strategy.has_revisions: # The base revision file is no longer", "self._override_strategy = OneFileOverrideStrategy(revision_dir) else: self._override_strategy = override_strategy self._override_strategy.configure( base_config_path, owner,", "revision_dir string \"\"\" self._revision_dir = revision_dir self._import_strategy = ImportOverrideStrategy(revision_dir, self.REVISION_EXT)", "user). :param contents Contents of the configuration file. :type contents", "directory for import files. :type revision_dir string \"\"\" self._revision_dir =", "will be used). SYSTEM_PRE_USER_GROUP = '10-system' USER_GROUP = '20-user' SYSTEM_POST_USER_GROUP", "apply_user_override(self, options, change_id=DEFAULT_CHANGE_ID): \"\"\"Apply a 'user' change to the configuration.", "the specific language governing permissions and limitations # under the", "changes = self._import_strategy.parse_updates() updated_revision = guestagent_utils.update_dict(changes, base_revision) operating_system.write_file( self._base_config_path, updated_revision,", "change_id, options): self._import_strategy.apply(group_name, change_id, options) self._regenerate_base_configuration() def remove(self, group_name, change_id=None):", "import abc import os import re import six from trove.guestagent.common", "is responsible for validating user inputs and requests. 
When supplied", "FileMode.ADD_READ_ALL, as_root=self._requires_root) self.refresh_cache() def has_system_override(self, change_id): \"\"\"Return whether a given", "under the Apache License, Version 2.0 (the \"License\"); you may", "responsible for validating user inputs and requests. When supplied an", "based on the current # configuration file on the first", "on a saved-off base revision. \"\"\" BASE_REVISION_NAME = 'base' REVISION_EXT", "get_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Get the user overrides\"\"\" return self._override_strategy.get(self.USER_GROUP, change_id) def", "to avoid upgrade compatibility issues in case the default implementation", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "self._owner, self._group, as_root=self._requires_root) operating_system.chmod( self._base_config_path, FileMode.ADD_READ_ALL, as_root=self._requires_root) self.refresh_cache() def has_system_override(self,", "names. The names determine the order in which the #", "strategy explicitly to avoid upgrade compatibility issues in case the", "self._revision_dir = revision_dir self._import_strategy = ImportOverrideStrategy(revision_dir, self.REVISION_EXT) def configure(self, base_config_path,", "changes on a saved-off base revision. \"\"\" BASE_REVISION_NAME = 'base'", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "requires_root) def get_value(self, key, default=None): \"\"\"Return the current value at", "= codec self._requires_root = requires_root def exists(self, group_name, change_id): return", "import FileMode class ConfigurationManager(object): \"\"\" ConfigurationManager is responsible for management", "within the group. :type change_id string :param options Configuration changes.", "if revision_file is None: # Create a new file. 
last_revision_index", "required by applicable law or agreed to in writing, software", "group_name, change_id, options): self._import_strategy.apply(group_name, change_id, options) self._regenerate_base_configuration() def remove(self, group_name,", "get(self, group_name, change_id): revision_file = self._find_revision_file(group_name, change_id) return operating_system.read_file(revision_file, codec=self._codec,", "to enumerate, apply and remove configuration overrides. \"\"\" @abc.abstractmethod def", "the override belongs to. :type group_name string :param change_id The", "directory for import files. :type revision_dir string :param revision_ext Extension", "- configuration overrides applied by the user via the Trove", "user via the Trove API. - System overrides - 'internal'", "to the configuration file. \"\"\" if not os.path.exists(self._base_revision_file): # Initialize", ":param group Group of the configuration files. :type group string", ":type base_config_path string :param owner Owner of the configuration and", "agreed to in writing, software # distributed under the License", "OneFile strategy by default. Store the revisions in a #", "limitations # under the License. import abc import os import", "format. :type codec StreamCodec :param requires_root Whether the strategy requires", "exists(self, group_name, change_id): return self._import_strategy.exists(group_name, change_id) def apply(self, group_name, change_id,", "distributed under the License is distributed on an \"AS IS\"", "removed: operating_system.remove(path, force=True, as_root=self._requires_root) def get(self, group_name, change_id): revision_file =", "revision files. \"\"\" if not os.path.exists(self._revision_dir): operating_system.create_directory( self._revision_dir, user=self._owner, group=self._group,", "options, change_id=DEFAULT_CHANGE_ID): \"\"\"Apply a 'user' change to the configuration. The", "file is no longer needed if there are no #", "the particular configuration format. 
:type codec StreamCodec :param requires_root Whether", "dict): # Serialize a dict of options for writing. self.save_configuration(self._codec.serialize(options))", "base_revision = operating_system.read_file( self._base_revision_file, codec=self._codec, as_root=self._requires_root) changes = self._import_strategy.parse_updates() updated_revision", "CONDITIONS OF ANY KIND, either express or implied. See the", "string :param owner Owner of the configuration files. :type owner", "or removed a new configuration file is generated by applying", "prefix' is to used to order user/system sets, 'n' is", "compatibility issues in case the default implementation changes in the", "not os.path.exists(self._base_revision_file): # Initialize the file with the current configuration", "into a dict if not already. self._apply_override( group_name, change_id, self._codec.deserialize(options))", "manage configuration overrides as well. \"\"\" # Configuration group names.", "'<set prefix>-<n>-<group name>.<ext>' where 'set prefix' is to used to", "os import re import six from trove.guestagent.common import guestagent_utils from", "the base file is always the most current version of", "last_revision_index = self._get_last_file_index(group_name) revision_file = guestagent_utils.build_file_path( self._revision_dir, '%s-%03d-%s' % (group_name,", "change_id) self.refresh_cache() def refresh_cache(self): self._value_cache = self.parse_configuration() @six.add_metaclass(abc.ABCMeta) class ConfigurationOverrideStrategy(object):", "options): \"\"\"Apply given options on the most current configuration revision.", "= group self._codec = codec self._requires_root = requires_root self._base_revision_file =", "if there are no # overrides. It will be regenerated", "the group. :type change_id string \"\"\" @abc.abstractmethod def get(self, group_name,", "generated by applying all changes on a saved-off base revision.", "ImportOverrideStrategy). Defaults to OneFileOverrideStrategy if None. 
This strategy should be", "and parse it into a dict. :returns: Configuration file as", ":type override_strategy ConfigurationOverrideStrategy \"\"\" self._base_config_path = base_config_path self._owner = owner", "Store the revisions in a # sub-directory at the location", "get applied over the # user group, unless specified otherwise", ":type options dict \"\"\" @abc.abstractmethod def remove(self, group_name, change_id=None): \"\"\"Rollback", "be regenerated based on the current # configuration file on", "is generated by applying all changes on a saved-off base", "the # groups get applied. System groups are divided into", "\"\"\"Collect and return a sorted list of paths to existing", "Inc. # All Rights Reserved. # # Licensed under the", ":type requires_root boolean :param override_strategy Strategy used to manage configuration", "self._base_revision_file = guestagent_utils.build_file_path( self._revision_dir, self.BASE_REVISION_NAME, self.REVISION_EXT) self._import_strategy.configure( base_config_path, owner, group,", "removed. We keep two sets of override files in a", "currently are any revision files. \"\"\" return (operating_system.exists( self._revision_dir, is_directory=True,", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", ":param owner Owner of the configuration and revision files. :type", "keep two sets of override files in a separate directory.", "# Configuration group names. The names determine the order in", "change_id='.+'): return self.FILE_NAME_PATTERN % (group_name, change_id, self._revision_ext) class OneFileOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"This", "or dict \"\"\" group_name = ( self.SYSTEM_PRE_USER_GROUP if pre_user else", "group_name, change_id): return self._import_strategy.exists(group_name, change_id) def apply(self, group_name, change_id, options):", "handles configuration files. 
The strategy provides functionality to enumerate, apply", "self._override_strategy.remove(self.SYSTEM_PRE_USER_GROUP) self._override_strategy.remove(self.SYSTEM_POST_USER_GROUP) operating_system.write_file( self._base_config_path, options, as_root=self._requires_root) operating_system.chown( self._base_config_path, self._owner, self._group,", "general system overrides will get applied over the # user", "not os.path.exists(self._revision_dir): operating_system.create_directory( self._revision_dir, user=self._owner, group=self._group, force=True, as_root=self._requires_root) def remove(self,", "dict \"\"\" if isinstance(options, dict): # Serialize a dict of", "revision_file: removed.add(revision_file) else: # Remove the entire group. removed =", "See the # License for the specific language governing permissions", "def configure(self, *args, **kwargs): \"\"\"Configure this strategy. A strategy needs", "1, change_id), self._revision_ext) else: # Update the existing file. current", ":param options Configuration changes. :type options dict \"\"\" @abc.abstractmethod def", "exists. :param group_name The group the override belongs to. :type", "separate directory. - User overrides - configuration overrides applied by", "<reponame>sapcc/trove # Copyright 2015 Tesora Inc. # All Rights Reserved.", "self._import_strategy.get(group_name, change_id) def _regenerate_base_configuration(self): \"\"\"Gather all configuration changes and apply", "when the override is removed. We keep two sets of", "self._base_revision_file, codec=self._codec, as_root=self._requires_root) changes = self._import_strategy.parse_updates() updated_revision = guestagent_utils.update_dict(changes, base_revision)", "changes. :type options string or dict \"\"\" self._apply_override(self.USER_GROUP, change_id, options)", "on the base revision. 
Write the results to the configuration", "requires_root=False, override_strategy=None): \"\"\" :param base_config_path Path to the configuration file.", "self._base_config_path, FileMode.ADD_READ_ALL, as_root=self._requires_root) self.refresh_cache() def has_system_override(self, change_id): \"\"\"Return whether a", "base_config_path string :param owner Owner of the configuration and revision", "law or agreed to in writing, software # distributed under", "as a Python dict. \"\"\" base_options = operating_system.read_file( self._base_config_path, codec=self._codec,", "change_id) self._remove_override(self.SYSTEM_PRE_USER_GROUP, change_id) def remove_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'user' configuration", "self._group, as_root=self._requires_root) operating_system.chmod( revision_file, FileMode.ADD_READ_ALL, as_root=self._requires_root) def _initialize_import_directory(self): \"\"\"Lazy-initialize the", "'default'. \"\"\" if self._value_cache is None: self.refresh_cache() return self._value_cache.get(key, default)", "configuration files. It uses the Import Strategy to keep the", "string \"\"\" @abc.abstractmethod def get(self, group_name, change_id=None): \"\"\"Return the contents", "'user' change to the configuration. The 'system' values will be", "if it # does not exist. operating_system.copy( self._base_config_path, self._base_revision_file, force=True,", "the current # configuration file on the first 'apply()'. operating_system.remove(self._base_revision_file,", "ConfigurationManager(object): \"\"\" ConfigurationManager is responsible for management of datastore configuration.", "remove configuration overrides. 
\"\"\" @abc.abstractmethod def configure(self, *args, **kwargs): \"\"\"Configure", "group_name, change_id): self._override_strategy.remove(group_name, change_id) self.refresh_cache() def refresh_cache(self): self._value_cache = self.parse_configuration()", "= 'rev' def __init__(self, revision_dir): \"\"\" :param revision_dir Path to", "parsed_options) return parsed_options @property def has_revisions(self): \"\"\"Return True if there", "index of the most current file in a given group.", "files. The files should be sorted in the same order", "\"\"\"Read contents of the configuration file (applying overrides if any)", "whether a given revision exists. \"\"\" @abc.abstractmethod def apply(self, group_name,", "removed = set() if change_id: # Remove a given file.", "the configuration and revision files. :type group string :param codec", "return (self._override_strategy.exists(self.SYSTEM_POST_USER_GROUP, change_id) or self._override_strategy.exists(self.SYSTEM_PRE_USER_GROUP, change_id)) def apply_system_override(self, options, change_id=DEFAULT_CHANGE_ID,", "configuration file which never changes itself. An override file is", "empty dict if the base file is always the most", "as_root=self._requires_root) self.refresh_cache() def has_system_override(self, change_id): \"\"\"Return whether a given 'system'", "overrides are always applied after all user changes so that", "a saved-off base revision. \"\"\" BASE_REVISION_NAME = 'base' REVISION_EXT =", "refresh_cache(self): self._value_cache = self.parse_configuration() @six.add_metaclass(abc.ABCMeta) class ConfigurationOverrideStrategy(object): \"\"\"ConfigurationOverrideStrategy handles configuration", "Path to the directory for import files. :type revision_dir string", "self._import_strategy.has_revisions: # The base revision file is no longer needed", "key, default=None): \"\"\"Return the current value at a given key", "# # Licensed under the Apache License, Version 2.0 (the", "of the configuration files. 
:type group string :param codec Codec", ":type revision_dir string :param revision_ext Extension of revision files. :type", "options) def apply_user_override(self, options, change_id=DEFAULT_CHANGE_ID): \"\"\"Apply a 'user' change to", "of paths to existing revision files. The files should be", "FileMode class ConfigurationManager(object): \"\"\" ConfigurationManager is responsible for management of", "there are no # overrides. It will be regenerated based", "self._import_strategy.exists(group_name, change_id) def apply(self, group_name, change_id, options): self._import_strategy.apply(group_name, change_id, options)", "revision_ext Extension of revision files. :type revision_ext string \"\"\" self._revision_dir", "abc import os import re import six from trove.guestagent.common import", "itself. An override file is simply deleted when the override", "= group self._codec = codec self._requires_root = requires_root self._value_cache =", "change_id string \"\"\" def parse_updates(self): \"\"\"Return all updates applied to", "revision files. The files should be sorted in the same", "change_id): return self._import_strategy.exists(group_name, change_id) def apply(self, group_name, change_id, options): self._import_strategy.apply(group_name,", "if not already. self._apply_override( group_name, change_id, self._codec.deserialize(options)) else: self._override_strategy.apply(group_name, change_id,", "= base_config_path self._owner = owner self._group = group self._codec =", "= operating_system.read_file( self._base_revision_file, codec=self._codec, as_root=self._requires_root) changes = self._import_strategy.parse_updates() updated_revision =", "a single dict. Return an empty dict if the base", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "change_id): \"\"\"Return whether a given revision exists. \"\"\" @abc.abstractmethod def", "any revision files. 
\"\"\" return (operating_system.exists( self._revision_dir, is_directory=True, as_root=self._requires_root) and", "group_name The group the override belongs to. :type group_name string", "the configuration file. :type contents string or dict \"\"\" if", "updates = self._override_strategy.parse_updates() guestagent_utils.update_dict(updates, base_options) return base_options def save_configuration(self, options):", "group_name, change_id=None): removed = set() if change_id: # Remove a", "single dict. Return an empty dict if the base file", "of the most current file in a given group. \"\"\"", "self._base_revision_file, force=True, preserve=True, as_root=self._requires_root) base_revision = operating_system.read_file( self._base_revision_file, codec=self._codec, as_root=self._requires_root)", "None if not override_strategy: # Use OneFile strategy by default.", "always the most current version of configuration. :returns: Updates to", "if not self._import_strategy.has_revisions: # The base revision file is no", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "file (applying overrides if any) and parse it into a", "keeps overrides in separate files that get imported into the", "if not os.path.exists(self._revision_dir): operating_system.create_directory( self._revision_dir, user=self._owner, group=self._group, force=True, as_root=self._requires_root) def", "for import files. :type revision_dir string :param revision_ext Extension of", "get(self, group_name, change_id=None): \"\"\"Return the contents of a given configuration", "strategy requires superuser privileges. 
:type requires_root boolean \"\"\" self._base_config_path =", "revisions in a # sub-directory at the location of the", "the index of the most current file in a given", "sets, 'n' is an index used to keep track of", "re.match(name_pattern, last_file_name) if last_index_match: return int(last_index_match.group(1)) return 0 def _collect_revision_files(self,", "for validating user inputs and requests. When supplied an override", "will get applied over the # user group, unless specified", "base file is always the most current version of configuration.", "at a given key or 'default'. \"\"\" if self._value_cache is", "override belongs to. :type group_name string :param change_id The name", "post-user. In general system overrides will get applied over the", ":param group_name The group the override belongs to. :type group_name", "its strategy explicitly to avoid upgrade compatibility issues in case", "\"\"\" self._apply_override(self.USER_GROUP, change_id, options) def get_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Get the user", "Path to the configuration file. :type base_config_path string :param owner", "by the user via the Trove API. - System overrides", "in case the default implementation changes in the future. :type", "base_options = operating_system.read_file( self._base_config_path, codec=self._codec, as_root=self._requires_root) updates = self._override_strategy.parse_updates() guestagent_utils.update_dict(updates,", "'rev' def __init__(self, revision_dir): \"\"\" :param revision_dir Path to the", "- System overrides - 'internal' configuration changes applied by the", "parsed_options = {} for path in self._collect_revision_files(): options = operating_system.read_file(path,", "owner self._group = group self._codec = codec self._requires_root = requires_root", "the directory for import files. 
:type revision_dir string \"\"\" self._revision_dir", "change_id=None): if self._import_strategy.has_revisions: self._import_strategy.remove(group_name, change_id=change_id) self._regenerate_base_configuration() if not self._import_strategy.has_revisions: #", "'system' values will be re-applied over this override. :param options", "(operating_system.exists( self._revision_dir, is_directory=True, as_root=self._requires_root) and (len(self._collect_revision_files()) > 0)) def _get_last_file_index(self,", "all user changes so that they override any user-defined setting.", "internally. When an override is applied or removed a new", "string :param group Group of the configuration files. :type group", "applied. \"\"\" FILE_NAME_PATTERN = r'%s-([0-9]+)-%s\\.%s$' def __init__(self, revision_dir, revision_ext): \"\"\"", "the first 'apply()'. operating_system.remove(self._base_revision_file, force=True, as_root=self._requires_root) def get(self, group_name, change_id):", "files. :type group string :param codec Codec for reading/writing of", "@abc.abstractmethod def remove(self, group_name, change_id=None): \"\"\"Rollback a given configuration override.", "revision_file is None: # Create a new file. last_revision_index =", "# user group, unless specified otherwise (i.e. SYSTEM_POST_USER_GROUP # will", "It uses the Import Strategy to keep the overrides internally.", "change_id): name_pattern = self._build_rev_name_pattern(group_name, change_id) found = operating_system.list_files_in_directory( self._revision_dir, recursive=True,", "group the override belongs to. 
:type group_name string :param change_id", ":type options string or dict \"\"\" self._apply_override(self.USER_GROUP, change_id, options) def", "separate files that get imported into the base configuration file", "on the current # configuration file on the first 'apply()'.", "owner, group, codec, requires_root) def exists(self, group_name, change_id): return self._import_strategy.exists(group_name,", "self._override_strategy.exists(self.SYSTEM_PRE_USER_GROUP, change_id)) def apply_system_override(self, options, change_id=DEFAULT_CHANGE_ID, pre_user=False): \"\"\"Apply a 'system'", "guestagent_utils.build_file_path( os.path.dirname(base_config_path), self.DEFAULT_STRATEGY_OVERRIDES_SUB_DIR) self._override_strategy = OneFileOverrideStrategy(revision_dir) else: self._override_strategy = override_strategy", "\"\"\"Configure this strategy. A strategy needs to be configured before", "divided into two camps; pre-user # and post-user. In general", "self._import_strategy.remove(group_name, change_id=change_id) self._regenerate_base_configuration() if not self._import_strategy.has_revisions: # The base revision", "used to order user/system sets, 'n' is an index used", "options Configuration changes. :type options dict \"\"\" @abc.abstractmethod def remove(self,", "user=self._owner, group=self._group, force=True, as_root=self._requires_root) def remove(self, group_name, change_id=None): removed =", "parsed_options @property def has_revisions(self): \"\"\"Return True if there currently are", "def _remove_override(self, group_name, change_id): self._override_strategy.remove(group_name, change_id) self.refresh_cache() def refresh_cache(self): self._value_cache", "case the default implementation changes in the future. :type override_strategy", "in a # sub-directory at the location of the configuration", "_apply_override(self, group_name, change_id, options): if not isinstance(options, dict): # Deserialize", "OF ANY KIND, either express or implied. 
See the #", "self._owner = owner self._group = group self._codec = codec self._requires_root", "\"\"\" self._revision_dir = revision_dir self._revision_ext = revision_ext def configure(self, base_config_path,", "# does not exist. operating_system.copy( self._base_config_path, self._base_revision_file, force=True, preserve=True, as_root=self._requires_root)", "in writing, software # distributed under the License is distributed", "the License. import abc import os import re import six", "configuration overrides (e.g. ImportOverrideStrategy). Defaults to OneFileOverrideStrategy if None. This", "if there currently are any revision files. \"\"\" return (operating_system.exists(", "\"\"\"Gather all configuration changes and apply them in order on", "operating_system.chmod( self._base_config_path, FileMode.ADD_READ_ALL, as_root=self._requires_root) self.refresh_cache() def has_system_override(self, change_id): \"\"\"Return whether", "Use OneFile strategy by default. Store the revisions in a", "a 'system' change to the configuration. System overrides are always", "changes in the future. :type override_strategy ConfigurationOverrideStrategy \"\"\" self._base_config_path =", "(e.g. ImportOverrideStrategy). Defaults to OneFileOverrideStrategy if None. This strategy should", "be configured before it can be used. It would typically", "dict. Return an empty dict if the base file is", "their set got applied. 
\"\"\" FILE_NAME_PATTERN = r'%s-([0-9]+)-%s\\.%s$' def __init__(self,", "\"\"\"This is a strategy for datastores that do not support", "self._value_cache = self.parse_configuration() @six.add_metaclass(abc.ABCMeta) class ConfigurationOverrideStrategy(object): \"\"\"ConfigurationOverrideStrategy handles configuration files.", "configuration file (applying overrides if any) and parse it into", "self._remove_override(self.SYSTEM_PRE_USER_GROUP, change_id) def remove_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'user' configuration change.", "current file in a given group. \"\"\" current_files = self._collect_revision_files(group_name)", "FILE_NAME_PATTERN = r'%s-([0-9]+)-%s\\.%s$' def __init__(self, revision_dir, revision_ext): \"\"\" :param revision_dir", "if current_files: name_pattern = self._build_rev_name_pattern(group_name=group_name) last_file_name = os.path.basename(current_files[-1]) last_index_match =", "belongs to. :type group_name string :param change_id The name of", "self.save_configuration(self._codec.serialize(options)) else: self._override_strategy.remove(self.USER_GROUP) self._override_strategy.remove(self.SYSTEM_PRE_USER_GROUP) self._override_strategy.remove(self.SYSTEM_POST_USER_GROUP) operating_system.write_file( self._base_config_path, options, as_root=self._requires_root) operating_system.chown(", "change_id): \"\"\"Return whether a given 'system' change exists. \"\"\" return", "by the guestagent. The name format of override files is:", "change to the configuration. System overrides are always applied after", "be re-applied over this override. :param options Configuration changes. 
:type", "operating_system.read_file(revision_file, codec=self._codec, as_root=self._requires_root) def parse_updates(self): parsed_options = {} for path", "as_root=self._requires_root) def parse_updates(self): parsed_options = {} for path in self._collect_revision_files():", "self._base_config_path = base_config_path self._owner = owner self._group = group self._codec", "order user/system sets, 'n' is an index used to keep", "self._value_cache is None: self.refresh_cache() return self._value_cache.get(key, default) def parse_configuration(self): \"\"\"Read", "format. :type codec StreamCodec :param requires_root Whether the manager requires", "revision_dir = guestagent_utils.build_file_path( os.path.dirname(base_config_path), self.DEFAULT_STRATEGY_OVERRIDES_SUB_DIR) self._override_strategy = OneFileOverrideStrategy(revision_dir) else: self._override_strategy", "applied after all user changes so that they override any", "with the License. You may obtain # a copy of", "writing configuration files. It is responsible for validating user inputs", "'overrides' DEFAULT_CHANGE_ID = 'common' def __init__(self, base_config_path, owner, group, codec,", "revision_dir string :param revision_ext Extension of revision files. :type revision_ext", "name format of override files is: '<set prefix>-<n>-<group name>.<ext>' where", "def remove(self, group_name, change_id=None): if self._import_strategy.has_revisions: self._import_strategy.remove(group_name, change_id=change_id) self._regenerate_base_configuration() if", "= self._import_strategy.parse_updates() updated_revision = guestagent_utils.update_dict(changes, base_revision) operating_system.write_file( self._base_config_path, updated_revision, codec=self._codec,", "options into a dict if not already. 
self._apply_override( group_name, change_id,", "self._codec.deserialize(options)) else: self._override_strategy.apply(group_name, change_id, options) self.refresh_cache() def remove_system_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert", "force=True, as_root=self._requires_root) def remove(self, group_name, change_id=None): removed = set() if", "datastore configuration. Its base functionality includes reading and writing configuration", "pre-user # and post-user. In general system overrides will get", "privileges. :type requires_root boolean \"\"\" self._base_config_path = base_config_path self._owner =", "the override within the group. :type change_id string \"\"\" @abc.abstractmethod", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "change_id, options) self.refresh_cache() def remove_system_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'system' configuration", "current_files = self._collect_revision_files(group_name) if current_files: name_pattern = self._build_rev_name_pattern(group_name=group_name) last_file_name =", "= r'%s-([0-9]+)-%s\\.%s$' def __init__(self, revision_dir, revision_ext): \"\"\" :param revision_dir Path", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "overrides. It will be regenerated based on the current #", "does not exist. operating_system.copy( self._base_config_path, self._base_revision_file, force=True, preserve=True, as_root=self._requires_root) base_revision", "= self.parse_configuration() @six.add_metaclass(abc.ABCMeta) class ConfigurationOverrideStrategy(object): \"\"\"ConfigurationOverrideStrategy handles configuration files. The", "multiple configuration files. It uses the Import Strategy to keep", "provides functionality to enumerate, apply and remove configuration overrides. \"\"\"", "future. 
:type override_strategy ConfigurationOverrideStrategy \"\"\" self._base_config_path = base_config_path self._owner =", "or dict \"\"\" self._apply_override(self.USER_GROUP, change_id, options) def get_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Get", "be sorted in the same order in which they were", "DEFAULT_STRATEGY_OVERRIDES_SUB_DIR = 'overrides' DEFAULT_CHANGE_ID = 'common' def __init__(self, base_config_path, owner,", "revision files. :type owner string :param group Group of the", "\"\"\"Apply a 'system' change to the configuration. System overrides are", "\"\"\"Write given contents to the base configuration file. Remove all", "a given key or 'default'. \"\"\" if self._value_cache is None:", "except in compliance with the License. You may obtain #", "to manage configuration overrides as well. \"\"\" # Configuration group", "apply_system_override(self, options, change_id=DEFAULT_CHANGE_ID, pre_user=False): \"\"\"Apply a 'system' change to the", "def exists(self, group_name, change_id): return self._find_revision_file(group_name, change_id) is not None", "\"\"\"Return all updates applied to the base revision as a", "override files in a separate directory. - User overrides -", "to be configured before it can be used. It would", "def apply_system_override(self, options, change_id=DEFAULT_CHANGE_ID, pre_user=False): \"\"\"Apply a 'system' change to", "change_id: # Remove a given file. revision_file = self._find_revision_file(group_name, change_id)", "group self._codec = codec self._requires_root = requires_root self._value_cache = None", "the base configuration file which never changes itself. An override", "# distributed under the License is distributed on an \"AS", "prefix>-<n>-<group name>.<ext>' where 'set prefix' is to used to order", "and apply them in order on the base revision. Write", "id already exists. :param group_name The group the override belongs", "# Unless required by applicable law or agreed to in", "(i.e. 
SYSTEM_POST_USER_GROUP # will be used). SYSTEM_PRE_USER_GROUP = '10-system' USER_GROUP", "determine the order in which the # groups get applied.", "base functionality includes reading and writing configuration files. It is", "Update if a file with the same id already exists.", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES", "is responsible for management of datastore configuration. Its base functionality", "Python dict. \"\"\" return {} class ImportOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"Import strategy keeps", "sets of override files in a separate directory. - User", "of the particular configuration format. :type codec StreamCodec :param requires_root", "with the same id already exists. :param group_name The group", ":returns: Configuration file as a Python dict. \"\"\" base_options =", "change_id) return operating_system.read_file(revision_file, codec=self._codec, as_root=self._requires_root) def parse_updates(self): parsed_options = {}", "within their set got applied. \"\"\" FILE_NAME_PATTERN = r'%s-([0-9]+)-%s\\.%s$' def", "each datastore defines its strategy explicitly to avoid upgrade compatibility", "def _regenerate_base_configuration(self): \"\"\"Gather all configuration changes and apply them in", "trove.guestagent.common import guestagent_utils from trove.guestagent.common import operating_system from trove.guestagent.common.operating_system import", "@abc.abstractmethod def exists(self, group_name, change_id): \"\"\"Return whether a given revision", "requires_root boolean \"\"\" self._base_config_path = base_config_path self._owner = owner self._group", ":param contents Contents of the configuration file. :type contents string", "codec=self._codec, as_root=self._requires_root) guestagent_utils.update_dict(options, parsed_options) return parsed_options @property def has_revisions(self): \"\"\"Return", "System groups are divided into two camps; pre-user # and", "'n' is an index used to keep track of the", "superuser privileges. 
:type requires_root boolean \"\"\" self._base_config_path = base_config_path self._owner", ":type contents string or dict \"\"\" if isinstance(options, dict): #", "'internal' configuration changes applied by the guestagent. The name format", "group_name, change_id): return self._import_strategy.get(group_name, change_id) def _regenerate_base_configuration(self): \"\"\"Gather all configuration", "self._revision_ext = revision_ext def configure(self, base_config_path, owner, group, codec, requires_root):", "revision_file = self._find_revision_file(group_name, change_id) return operating_system.read_file(revision_file, codec=self._codec, as_root=self._requires_root) def parse_updates(self):", "group_name, change_id, options): \"\"\"Apply given options on the most current", "= requires_root self._base_revision_file = guestagent_utils.build_file_path( self._revision_dir, self.BASE_REVISION_NAME, self.REVISION_EXT) self._import_strategy.configure( base_config_path,", "the entire group. removed = self._collect_revision_files(group_name) for path in removed:", "to existing revision files. The files should be sorted in", "values will be re-applied over this override. :param options Configuration", "specific language governing permissions and limitations # under the License.", "configuration overrides applied by the user via the Trove API.", "# not use this file except in compliance with the", "self.FILE_NAME_PATTERN % (group_name, change_id, self._revision_ext) class OneFileOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"This is a", "group. 
\"\"\" current_files = self._collect_revision_files(group_name) if current_files: name_pattern = self._build_rev_name_pattern(group_name=group_name)", "self.BASE_REVISION_NAME, self.REVISION_EXT) self._import_strategy.configure( base_config_path, owner, group, codec, requires_root) def exists(self,", "string :param change_id The name of the override within the", "codec self._requires_root = requires_root def exists(self, group_name, change_id): return self._find_revision_file(group_name,", "if self._import_strategy.has_revisions: self._import_strategy.remove(group_name, change_id=change_id) self._regenerate_base_configuration() if not self._import_strategy.has_revisions: # The", "self._revision_ext) else: # Update the existing file. current = operating_system.read_file(", "string \"\"\" def parse_updates(self): \"\"\"Return all updates applied to the", "includes reading and writing configuration files. It is responsible for", "configuration change. \"\"\" self._remove_override(self.USER_GROUP, change_id) def _remove_override(self, group_name, change_id): self._override_strategy.remove(group_name,", "the current value at a given key or 'default'. \"\"\"", "return parsed_options @property def has_revisions(self): \"\"\"Return True if there currently", "not None def apply(self, group_name, change_id, options): self._initialize_import_directory() revision_file =", "Deserialize the options into a dict if not already. self._apply_override(", "overrides (both system and user). :param contents Contents of the", "(group_name, last_revision_index + 1, change_id), self._revision_ext) else: # Update the", "requires_root): \"\"\" :param base_config_path Path to the configuration file. :type", "2015 Tesora Inc. # All Rights Reserved. # # Licensed", "system overrides will get applied over the # user group,", "configuration files. 
:type owner string :param group Group of the", "(self._override_strategy.exists(self.SYSTEM_POST_USER_GROUP, change_id) or self._override_strategy.exists(self.SYSTEM_PRE_USER_GROUP, change_id)) def apply_system_override(self, options, change_id=DEFAULT_CHANGE_ID, pre_user=False):", "under the License is distributed on an \"AS IS\" BASIS,", "overrides applied by the user via the Trove API. -", "= requires_root self._value_cache = None if not override_strategy: # Use", "will be re-applied over this override. :param options Configuration changes.", "the contents of a given configuration override :param group_name The", "pattern=name_pattern, as_root=self._requires_root) return next(iter(found), None) def _build_rev_name_pattern(self, group_name='.+', change_id='.+'): return", "return int(last_index_match.group(1)) return 0 def _collect_revision_files(self, group_name='.+'): \"\"\"Collect and return", "parse it into a dict. :returns: Configuration file as a", "longer needed if there are no # overrides. It will", "camps; pre-user # and post-user. In general system overrides will", "will be regenerated based on the current # configuration file", "this file except in compliance with the License. You may", "string or dict \"\"\" group_name = ( self.SYSTEM_PRE_USER_GROUP if pre_user", "def exists(self, group_name, change_id): \"\"\"Return whether a given revision exists.", "to. :type group_name string :param change_id The name of the", "def parse_updates(self): \"\"\"Return all updates applied to the base revision", "base revision as a Python dict. \"\"\" return {} class", "\"\"\"Rollback a given configuration override. Remove the whole group if", "file is generated by applying all changes on a saved-off", "owner, group, codec, requires_root=False, override_strategy=None): \"\"\" :param base_config_path Path to", "is an index used to keep track of the order", "file. 
revision_file = self._find_revision_file(group_name, change_id) if revision_file: removed.add(revision_file) else: #", "of options for writing. self.save_configuration(self._codec.serialize(options)) else: self._override_strategy.remove(self.USER_GROUP) self._override_strategy.remove(self.SYSTEM_PRE_USER_GROUP) self._override_strategy.remove(self.SYSTEM_POST_USER_GROUP) operating_system.write_file(", "self._codec = codec self._requires_root = requires_root self._base_revision_file = guestagent_utils.build_file_path( self._revision_dir,", "the results to the configuration file. \"\"\" if not os.path.exists(self._base_revision_file):", "as a Python dict. \"\"\" return {} class ImportOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"Import", "configuration contents if it # does not exist. operating_system.copy( self._base_config_path,", "a new configuration file is generated by applying all changes", ":type revision_dir string \"\"\" self._revision_dir = revision_dir self._import_strategy = ImportOverrideStrategy(revision_dir,", "configuration overrides. \"\"\" @abc.abstractmethod def configure(self, *args, **kwargs): \"\"\"Configure this", "change_id=DEFAULT_CHANGE_ID): \"\"\"Apply a 'user' change to the configuration. The 'system'", "applied to the base revision as a single dict. Return", "group. removed = self._collect_revision_files(group_name) for path in removed: operating_system.remove(path, force=True,", "change_id) def apply(self, group_name, change_id, options): self._import_strategy.apply(group_name, change_id, options) self._regenerate_base_configuration()", "options): if not isinstance(options, dict): # Deserialize the options into", "options string or dict \"\"\" group_name = ( self.SYSTEM_PRE_USER_GROUP if", "overrides - 'internal' configuration changes applied by the guestagent. The", "file except in compliance with the License. 
You may obtain", "SYSTEM_PRE_USER_GROUP = '10-system' USER_GROUP = '20-user' SYSTEM_POST_USER_GROUP = '50-system' DEFAULT_STRATEGY_OVERRIDES_SUB_DIR", "file with the current configuration contents if it # does", "of the configuration file. :type contents string or dict \"\"\"", "OR CONDITIONS OF ANY KIND, either express or implied. See", "of the configuration files. :type owner string :param group Group", "operating_system.remove(path, force=True, as_root=self._requires_root) def get(self, group_name, change_id): revision_file = self._find_revision_file(group_name,", "revision_dir self._import_strategy = ImportOverrideStrategy(revision_dir, self.REVISION_EXT) def configure(self, base_config_path, owner, group,", "the user via the Trove API. - System overrides -", "revision_dir Path to the directory for import files. :type revision_dir", "is a strategy for datastores that do not support multiple", "the override within the group. :type change_id string :param options", "# Remove a given file. revision_file = self._find_revision_file(group_name, change_id) if", "permissions and limitations # under the License. import abc import", "codec StreamCodec :param requires_root Whether the manager requires superuser privileges.", "(both system and user). :param contents Contents of the configuration", "def parse_updates(self): parsed_options = {} for path in self._collect_revision_files(): options", "Initialize the file with the current configuration contents if it", "for reading/writing of the particular configuration format. :type codec StreamCodec", "they override any user-defined setting. :param options Configuration changes. :type", "to the base configuration file. 
Remove all existing overrides (both", "trove.guestagent.common.operating_system import FileMode class ConfigurationManager(object): \"\"\" ConfigurationManager is responsible for", "self._apply_override(group_name, change_id, options) def apply_user_override(self, options, change_id=DEFAULT_CHANGE_ID): \"\"\"Apply a 'user'", "a given file. revision_file = self._find_revision_file(group_name, change_id) if revision_file: removed.add(revision_file)", "Contents of the configuration file. :type contents string or dict", "reading and writing configuration files. It is responsible for validating", "def _collect_revision_files(self, group_name='.+'): \"\"\"Collect and return a sorted list of", "= operating_system.read_file( self._base_config_path, codec=self._codec, as_root=self._requires_root) updates = self._override_strategy.parse_updates() guestagent_utils.update_dict(updates, base_options)", "base configuration file. Remove all existing overrides (both system and", "operating_system.write_file( self._base_config_path, options, as_root=self._requires_root) operating_system.chown( self._base_config_path, self._owner, self._group, as_root=self._requires_root) operating_system.chmod(", "= ImportOverrideStrategy(revision_dir, self.REVISION_EXT) def configure(self, base_config_path, owner, group, codec, requires_root):", "user changes so that they override any user-defined setting. :param", "an override strategy it allows the user to manage configuration", "= codec self._requires_root = requires_root self._base_revision_file = guestagent_utils.build_file_path( self._revision_dir, self.BASE_REVISION_NAME,", ":param codec Codec for reading/writing of the particular configuration format.", "revision files. :type group string :param codec Codec for reading/writing", "they were applied. 
\"\"\" name_pattern = self._build_rev_name_pattern(group_name=group_name) return sorted(operating_system.list_files_in_directory( self._revision_dir,", "group self._codec = codec self._requires_root = requires_root self._base_revision_file = guestagent_utils.build_file_path(", "\"\"\" @abc.abstractmethod def exists(self, group_name, change_id): \"\"\"Return whether a given", "parse_configuration(self): \"\"\"Read contents of the configuration file (applying overrides if", "revision. \"\"\" BASE_REVISION_NAME = 'base' REVISION_EXT = 'rev' def __init__(self,", "be compatible with very much any datastore. It is recommended", "change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'system' configuration change. \"\"\" self._remove_override(self.SYSTEM_POST_USER_GROUP, change_id) self._remove_override(self.SYSTEM_PRE_USER_GROUP,", "= revision_dir self._revision_ext = revision_ext def configure(self, base_config_path, owner, group,", "as_root=self._requires_root) options = guestagent_utils.update_dict(options, current) operating_system.write_file( revision_file, options, codec=self._codec, as_root=self._requires_root)", "with very much any datastore. It is recommended each datastore", "the base revision as a Python dict. \"\"\" return {}", "a given group. \"\"\" current_files = self._collect_revision_files(group_name) if current_files: name_pattern", "self.DEFAULT_STRATEGY_OVERRIDES_SUB_DIR) self._override_strategy = OneFileOverrideStrategy(revision_dir) else: self._override_strategy = override_strategy self._override_strategy.configure( base_config_path,", "self._base_config_path, self._base_revision_file, force=True, preserve=True, as_root=self._requires_root) base_revision = operating_system.read_file( self._base_revision_file, codec=self._codec,", "configuration format. :type codec StreamCodec :param requires_root Whether the strategy", "which overrides within their set got applied. 
\"\"\" FILE_NAME_PATTERN =", "names determine the order in which the # groups get", "group_name='.+'): \"\"\"Collect and return a sorted list of paths to", "user overrides\"\"\" return self._override_strategy.get(self.USER_GROUP, change_id) def _apply_override(self, group_name, change_id, options):", "change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'user' configuration change. \"\"\" self._remove_override(self.USER_GROUP, change_id) def", "__init__(self, base_config_path, owner, group, codec, requires_root=False, override_strategy=None): \"\"\" :param base_config_path", "SYSTEM_POST_USER_GROUP # will be used). SYSTEM_PRE_USER_GROUP = '10-system' USER_GROUP =", "operating_system.read_file(path, codec=self._codec, as_root=self._requires_root) guestagent_utils.update_dict(options, parsed_options) return parsed_options @property def has_revisions(self):", "file which never changes itself. An override file is simply", "'set prefix' is to used to order user/system sets, 'n'", "into a dict. :returns: Configuration file as a Python dict.", "writing, software # distributed under the License is distributed on", "Python dict. \"\"\" base_options = operating_system.read_file( self._base_config_path, codec=self._codec, as_root=self._requires_root) updates", "and revision files. :type group string :param codec Codec for", "superuser privileges. :type requires_root boolean :param override_strategy Strategy used to", "the License. You may obtain # a copy of the", "use this file except in compliance with the License. You", "\"\"\"Return whether a given 'system' change exists. \"\"\" return (self._override_strategy.exists(self.SYSTEM_POST_USER_GROUP,", "order in which they were applied. \"\"\" name_pattern = self._build_rev_name_pattern(group_name=group_name)", "and post-user. 
In general system overrides will get applied over", "an index used to keep track of the order in", "revision_ext string \"\"\" self._revision_dir = revision_dir self._revision_ext = revision_ext def", "location of the configuration file. revision_dir = guestagent_utils.build_file_path( os.path.dirname(base_config_path), self.DEFAULT_STRATEGY_OVERRIDES_SUB_DIR)", "to the directory for import files. :type revision_dir string \"\"\"", "SYSTEM_POST_USER_GROUP = '50-system' DEFAULT_STRATEGY_OVERRIDES_SUB_DIR = 'overrides' DEFAULT_CHANGE_ID = 'common' def", "\"\"\"ConfigurationOverrideStrategy handles configuration files. The strategy provides functionality to enumerate,", "particular configuration format. :type codec StreamCodec :param requires_root Whether the", "name of the override within the group. :type change_id string", ":type change_id string \"\"\" def parse_updates(self): \"\"\"Return all updates applied", "pre_user=False): \"\"\"Apply a 'system' change to the configuration. System overrides", "'50-system' DEFAULT_STRATEGY_OVERRIDES_SUB_DIR = 'overrides' DEFAULT_CHANGE_ID = 'common' def __init__(self, base_config_path,", ":type codec StreamCodec :param requires_root Whether the manager requires superuser", "so that they override any user-defined setting. :param options Configuration", "requires_root def exists(self, group_name, change_id): return self._find_revision_file(group_name, change_id) is not", "groups are divided into two camps; pre-user # and post-user.", "removed.add(revision_file) else: # Remove the entire group. removed = self._collect_revision_files(group_name)", "operating_system.remove(self._base_revision_file, force=True, as_root=self._requires_root) def get(self, group_name, change_id): return self._import_strategy.get(group_name, change_id)", "exists(self, group_name, change_id): \"\"\"Return whether a given revision exists. 
\"\"\"", "os.path.exists(self._revision_dir): operating_system.create_directory( self._revision_dir, user=self._owner, group=self._group, force=True, as_root=self._requires_root) def remove(self, group_name,", "None. This strategy should be compatible with very much any", "return (operating_system.exists( self._revision_dir, is_directory=True, as_root=self._requires_root) and (len(self._collect_revision_files()) > 0)) def", "\"\"\"Get the user overrides\"\"\" return self._override_strategy.get(self.USER_GROUP, change_id) def _apply_override(self, group_name,", "Defaults to OneFileOverrideStrategy if None. This strategy should be compatible", "None: self.refresh_cache() return self._value_cache.get(key, default) def parse_configuration(self): \"\"\"Read contents of", "group Group of the configuration and revision files. :type group", "express or implied. See the # License for the specific", "compatible with very much any datastore. It is recommended each", "system and user). :param contents Contents of the configuration file.", "the Apache License, Version 2.0 (the \"License\"); you may #", "into the base configuration file which never changes itself. An", "configuration changes applied by the guestagent. The name format of", "any) and parse it into a dict. :returns: Configuration file", "'common' def __init__(self, base_config_path, owner, group, codec, requires_root=False, override_strategy=None): \"\"\"", "overrides will get applied over the # user group, unless", "a 'user' change to the configuration. The 'system' values will", "group_name='.+', change_id='.+'): return self.FILE_NAME_PATTERN % (group_name, change_id, self._revision_ext) class OneFileOverrideStrategy(ConfigurationOverrideStrategy):", "\"\"\" self._revision_dir = revision_dir self._import_strategy = ImportOverrideStrategy(revision_dir, self.REVISION_EXT) def configure(self,", "configuration file is generated by applying all changes on a", "configuration files. 
:type group string :param codec Codec for reading/writing", "0)) def _get_last_file_index(self, group_name): \"\"\"Get the index of the most", "self._build_rev_name_pattern(group_name=group_name) return sorted(operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root)) def _find_revision_file(self, group_name,", "in removed: operating_system.remove(path, force=True, as_root=self._requires_root) def get(self, group_name, change_id): revision_file", "is simply deleted when the override is removed. We keep", "change_id=None): removed = set() if change_id: # Remove a given", "string :param revision_ext Extension of revision files. :type revision_ext string", "and user). :param contents Contents of the configuration file. :type", "operating_system.write_file( revision_file, options, codec=self._codec, as_root=self._requires_root) operating_system.chown( revision_file, self._owner, self._group, as_root=self._requires_root)", "user group, unless specified otherwise (i.e. SYSTEM_POST_USER_GROUP # will be", "regenerated based on the current # configuration file on the", "override. Remove the whole group if 'change_id' is None. :param", "'change_id' is None. :param group_name The group the override belongs", "a file with the same id already exists. :param group_name", "remove_system_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'system' configuration change. \"\"\" self._remove_override(self.SYSTEM_POST_USER_GROUP, change_id)", "base_config_path Path to the configuration file. :type base_config_path string :param", "group if 'change_id' is None. :param group_name The group the", "used to manage configuration overrides (e.g. ImportOverrideStrategy). Defaults to OneFileOverrideStrategy", "it into a dict. 
:returns: Configuration file as a Python", "change_id=None): \"\"\"Return the contents of a given configuration override :param", "options, as_root=self._requires_root) operating_system.chown( self._base_config_path, self._owner, self._group, as_root=self._requires_root) operating_system.chmod( self._base_config_path, FileMode.ADD_READ_ALL,", "\"\"\"Return whether a given revision exists. \"\"\" @abc.abstractmethod def apply(self,", "enumerate, apply and remove configuration overrides. \"\"\" @abc.abstractmethod def configure(self,", "owner, group, codec, requires_root): \"\"\" :param base_config_path Path to the", "options) self._regenerate_base_configuration() def remove(self, group_name, change_id=None): if self._import_strategy.has_revisions: self._import_strategy.remove(group_name, change_id=change_id)", "contents Contents of the configuration file. :type contents string or", "whether a given 'system' change exists. \"\"\" return (self._override_strategy.exists(self.SYSTEM_POST_USER_GROUP, change_id)", "in the future. :type override_strategy ConfigurationOverrideStrategy \"\"\" self._base_config_path = base_config_path", "codec self._requires_root = requires_root self._value_cache = None if not override_strategy:", "the configuration file. revision_dir = guestagent_utils.build_file_path( os.path.dirname(base_config_path), self.DEFAULT_STRATEGY_OVERRIDES_SUB_DIR) self._override_strategy =", "When supplied an override strategy it allows the user to", "_collect_revision_files(self, group_name='.+'): \"\"\"Collect and return a sorted list of paths", "contents to the base configuration file. Remove all existing overrides", "dict \"\"\" group_name = ( self.SYSTEM_PRE_USER_GROUP if pre_user else self.SYSTEM_POST_USER_GROUP)", "override_strategy: # Use OneFile strategy by default. 
Store the revisions", "changes and apply them in order on the base revision.", "options) def get_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Get the user overrides\"\"\" return self._override_strategy.get(self.USER_GROUP,", "group, unless specified otherwise (i.e. SYSTEM_POST_USER_GROUP # will be used).", "track of the order in which overrides within their set", "self._requires_root = requires_root self._base_revision_file = guestagent_utils.build_file_path( self._revision_dir, self.BASE_REVISION_NAME, self.REVISION_EXT) self._import_strategy.configure(", "= self._get_last_file_index(group_name) revision_file = guestagent_utils.build_file_path( self._revision_dir, '%s-%03d-%s' % (group_name, last_revision_index", "is_directory=True, as_root=self._requires_root) and (len(self._collect_revision_files()) > 0)) def _get_last_file_index(self, group_name): \"\"\"Get", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "revision. Update if a file with the same id already", "self._codec = codec self._requires_root = requires_root def exists(self, group_name, change_id):", "\"\"\"Return the contents of a given configuration override :param group_name", "\"\"\" self._remove_override(self.SYSTEM_POST_USER_GROUP, change_id) self._remove_override(self.SYSTEM_PRE_USER_GROUP, change_id) def remove_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a", "to the configuration file. :type base_config_path string :param owner Owner", "group_name, change_id=None): if self._import_strategy.has_revisions: self._import_strategy.remove(group_name, change_id=change_id) self._regenerate_base_configuration() if not self._import_strategy.has_revisions:", "a dict. :returns: Configuration file as a Python dict. \"\"\"", "0 def _collect_revision_files(self, group_name='.+'): \"\"\"Collect and return a sorted list", "dict. 
\"\"\" return {} class ImportOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"Import strategy keeps overrides", "overrides within their set got applied. \"\"\" FILE_NAME_PATTERN = r'%s-([0-9]+)-%s\\.%s$'", "dict of options for writing. self.save_configuration(self._codec.serialize(options)) else: self._override_strategy.remove(self.USER_GROUP) self._override_strategy.remove(self.SYSTEM_PRE_USER_GROUP) self._override_strategy.remove(self.SYSTEM_POST_USER_GROUP)", "groups get applied. System groups are divided into two camps;", "at the location of the configuration file. revision_dir = guestagent_utils.build_file_path(", "codec=self._codec, as_root=self._requires_root) options = guestagent_utils.update_dict(options, current) operating_system.write_file( revision_file, options, codec=self._codec,", "self._base_config_path, codec=self._codec, as_root=self._requires_root) updates = self._override_strategy.parse_updates() guestagent_utils.update_dict(updates, base_options) return base_options", "= self._build_rev_name_pattern(group_name, change_id) found = operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root)", "the future. :type override_strategy ConfigurationOverrideStrategy \"\"\" self._base_config_path = base_config_path self._owner", "License for the specific language governing permissions and limitations #", "of the override within the group. :type change_id string :param", ":type change_id string \"\"\" @abc.abstractmethod def get(self, group_name, change_id=None): \"\"\"Return", "configuration. System overrides are always applied after all user changes", "would typically be configured by the ConfigurationManager. \"\"\" @abc.abstractmethod def", "it can be used. 
It would typically be configured by", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "\"\"\" @abc.abstractmethod def get(self, group_name, change_id=None): \"\"\"Return the contents of", "Configuration file as a Python dict. \"\"\" base_options = operating_system.read_file(", "string or dict \"\"\" if isinstance(options, dict): # Serialize a", "group=self._group, force=True, as_root=self._requires_root) def remove(self, group_name, change_id=None): removed = set()", "FileMode.ADD_READ_ALL, as_root=self._requires_root) def _initialize_import_directory(self): \"\"\"Lazy-initialize the directory for imported revision", "them in order on the base revision. Write the results", "uses the Import Strategy to keep the overrides internally. When", "\"\"\" return (self._override_strategy.exists(self.SYSTEM_POST_USER_GROUP, change_id) or self._override_strategy.exists(self.SYSTEM_PRE_USER_GROUP, change_id)) def apply_system_override(self, options,", "Rights Reserved. # # Licensed under the Apache License, Version", "override_strategy=None): \"\"\" :param base_config_path Path to the configuration file. :type", "used to keep track of the order in which overrides", "override within the group. :type change_id string \"\"\" def parse_updates(self):", "and return a sorted list of paths to existing revision", "to the base revision as a single dict. Return an", "def remove(self, group_name, change_id=None): \"\"\"Rollback a given configuration override. Remove", "options string or dict \"\"\" self._apply_override(self.USER_GROUP, change_id, options) def get_user_override(self,", "the configuration. The 'system' values will be re-applied over this", "self.parse_configuration() @six.add_metaclass(abc.ABCMeta) class ConfigurationOverrideStrategy(object): \"\"\"ConfigurationOverrideStrategy handles configuration files. 
The strategy", "that get imported into the base configuration file which never", "dict \"\"\" self._apply_override(self.USER_GROUP, change_id, options) def get_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Get the", "self._group, as_root=self._requires_root) operating_system.chmod( self._base_config_path, FileMode.ADD_READ_ALL, as_root=self._requires_root) self.refresh_cache() def has_system_override(self, change_id):", "as_root=self._requires_root) def _initialize_import_directory(self): \"\"\"Lazy-initialize the directory for imported revision files.", "directory for imported revision files. \"\"\" if not os.path.exists(self._revision_dir): operating_system.create_directory(", "# configuration file on the first 'apply()'. operating_system.remove(self._base_revision_file, force=True, as_root=self._requires_root)", "change_id): self._override_strategy.remove(group_name, change_id) self.refresh_cache() def refresh_cache(self): self._value_cache = self.parse_configuration() @six.add_metaclass(abc.ABCMeta)", "simply deleted when the override is removed. We keep two", "the # License for the specific language governing permissions and", "guestagent_utils.update_dict(updates, base_options) return base_options def save_configuration(self, options): \"\"\"Write given contents", ":param revision_dir Path to the directory for import files. :type", "reading/writing of the particular configuration format. :type codec StreamCodec :param", "files that get imported into the base configuration file which", "return self._override_strategy.get(self.USER_GROUP, change_id) def _apply_override(self, group_name, change_id, options): if not", "\"\"\" :param revision_dir Path to the directory for import files.", "there currently are any revision files. \"\"\" return (operating_system.exists( self._revision_dir,", "# sub-directory at the location of the configuration file. revision_dir", "for datastores that do not support multiple configuration files. 
It", "r'%s-([0-9]+)-%s\\.%s$' def __init__(self, revision_dir, revision_ext): \"\"\" :param revision_dir Path to", "configuration file. revision_dir = guestagent_utils.build_file_path( os.path.dirname(base_config_path), self.DEFAULT_STRATEGY_OVERRIDES_SUB_DIR) self._override_strategy = OneFileOverrideStrategy(revision_dir)", "applied by the guestagent. The name format of override files", "- User overrides - configuration overrides applied by the user", "= None if not override_strategy: # Use OneFile strategy by", "self.REVISION_EXT) self._import_strategy.configure( base_config_path, owner, group, codec, requires_root) def exists(self, group_name,", "much any datastore. It is recommended each datastore defines its", "on the most current configuration revision. Update if a file", "options = guestagent_utils.update_dict(options, current) operating_system.write_file( revision_file, options, codec=self._codec, as_root=self._requires_root) operating_system.chown(", "change_id) or self._override_strategy.exists(self.SYSTEM_PRE_USER_GROUP, change_id)) def apply_system_override(self, options, change_id=DEFAULT_CHANGE_ID, pre_user=False): \"\"\"Apply", "= '10-system' USER_GROUP = '20-user' SYSTEM_POST_USER_GROUP = '50-system' DEFAULT_STRATEGY_OVERRIDES_SUB_DIR =", "Configuration changes. :type options string or dict \"\"\" group_name =", "results to the configuration file. \"\"\" if not os.path.exists(self._base_revision_file): #", "if not os.path.exists(self._base_revision_file): # Initialize the file with the current", "a 'user' configuration change. 
\"\"\" self._remove_override(self.USER_GROUP, change_id) def _remove_override(self, group_name,", "codec=self._codec, as_root=self._requires_root) operating_system.chown( revision_file, self._owner, self._group, as_root=self._requires_root) operating_system.chmod( revision_file, FileMode.ADD_READ_ALL,", "= 'common' def __init__(self, base_config_path, owner, group, codec, requires_root=False, override_strategy=None):", "the options into a dict if not already. self._apply_override( group_name,", "'apply()'. operating_system.remove(self._base_revision_file, force=True, as_root=self._requires_root) def get(self, group_name, change_id): return self._import_strategy.get(group_name,", "Whether the manager requires superuser privileges. :type requires_root boolean :param", "if isinstance(options, dict): # Serialize a dict of options for", "self._base_config_path, options, as_root=self._requires_root) operating_system.chown( self._base_config_path, self._owner, self._group, as_root=self._requires_root) operating_system.chmod( self._base_config_path,", "revision_file, codec=self._codec, as_root=self._requires_root) options = guestagent_utils.update_dict(options, current) operating_system.write_file( revision_file, options,", "explicitly to avoid upgrade compatibility issues in case the default", "within the group. :type change_id string \"\"\" @abc.abstractmethod def get(self,", "> 0)) def _get_last_file_index(self, group_name): \"\"\"Get the index of the", "last_index_match = re.match(name_pattern, last_file_name) if last_index_match: return int(last_index_match.group(1)) return 0", "never changes itself. An override file is simply deleted when", "as well. \"\"\" # Configuration group names. 
The names determine", "of the order in which overrides within their set got", "the file with the current configuration contents if it #", "a given configuration override :param group_name The group the override", "class ConfigurationManager(object): \"\"\" ConfigurationManager is responsible for management of datastore", "this override. :param options Configuration changes. :type options string or", "file. \"\"\" if not os.path.exists(self._base_revision_file): # Initialize the file with", "System overrides are always applied after all user changes so", "Return an empty dict if the base file is always", "as_root=self._requires_root) base_revision = operating_system.read_file( self._base_revision_file, codec=self._codec, as_root=self._requires_root) changes = self._import_strategy.parse_updates()", "\"\"\" base_options = operating_system.read_file( self._base_config_path, codec=self._codec, as_root=self._requires_root) updates = self._override_strategy.parse_updates()", "typically be configured by the ConfigurationManager. \"\"\" @abc.abstractmethod def exists(self,", "= guestagent_utils.build_file_path( os.path.dirname(base_config_path), self.DEFAULT_STRATEGY_OVERRIDES_SUB_DIR) self._override_strategy = OneFileOverrideStrategy(revision_dir) else: self._override_strategy =", "remove(self, group_name, change_id=None): if self._import_strategy.has_revisions: self._import_strategy.remove(group_name, change_id=change_id) self._regenerate_base_configuration() if not", "and revision files. :type owner string :param group Group of", "overrides as well. \"\"\" # Configuration group names. The names", ":param requires_root Whether the manager requires superuser privileges. 
:type requires_root", "System overrides - 'internal' configuration changes applied by the guestagent.", "self._override_strategy = override_strategy self._override_strategy.configure( base_config_path, owner, group, codec, requires_root) def", "It is recommended each datastore defines its strategy explicitly to", ":returns: Updates to the base revision as a Python dict.", "self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root)) def _find_revision_file(self, group_name, change_id): name_pattern =", "dict. :returns: Configuration file as a Python dict. \"\"\" base_options", "options): self._import_strategy.apply(group_name, change_id, options) self._regenerate_base_configuration() def remove(self, group_name, change_id=None): if", "An override file is simply deleted when the override is", "of the configuration and revision files. :type group string :param", "dict): # Deserialize the options into a dict if not", "given file. revision_file = self._find_revision_file(group_name, change_id) if revision_file: removed.add(revision_file) else:", "file. current = operating_system.read_file( revision_file, codec=self._codec, as_root=self._requires_root) options = guestagent_utils.update_dict(options,", "return operating_system.read_file(revision_file, codec=self._codec, as_root=self._requires_root) def parse_updates(self): parsed_options = {} for", "codec=self._codec, as_root=self._requires_root) changes = self._import_strategy.parse_updates() updated_revision = guestagent_utils.update_dict(changes, base_revision) operating_system.write_file(", "operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root) return next(iter(found), None) def _build_rev_name_pattern(self,", "strategy provides functionality to enumerate, apply and remove configuration overrides.", "configure(self, *args, **kwargs): \"\"\"Configure this strategy. A strategy needs to", "# and post-user. 
In general system overrides will get applied", "\"\"\" @abc.abstractmethod def configure(self, *args, **kwargs): \"\"\"Configure this strategy. A", "changes itself. An override file is simply deleted when the", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "revision_file, FileMode.ADD_READ_ALL, as_root=self._requires_root) def _initialize_import_directory(self): \"\"\"Lazy-initialize the directory for imported", "remove(self, group_name, change_id=None): removed = set() if change_id: # Remove", "return self._value_cache.get(key, default) def parse_configuration(self): \"\"\"Read contents of the configuration", "# will be used). SYSTEM_PRE_USER_GROUP = '10-system' USER_GROUP = '20-user'", "are no # overrides. It will be regenerated based on", "you may # not use this file except in compliance", "\"\"\" # Configuration group names. The names determine the order", "\"\"\" FILE_NAME_PATTERN = r'%s-([0-9]+)-%s\\.%s$' def __init__(self, revision_dir, revision_ext): \"\"\" :param", "the configuration and revision files. :type owner string :param group", "it # does not exist. operating_system.copy( self._base_config_path, self._base_revision_file, force=True, preserve=True,", "is applied or removed a new configuration file is generated", "remove_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'user' configuration change. \"\"\" self._remove_override(self.USER_GROUP, change_id)", "are divided into two camps; pre-user # and post-user. In", "saved-off base revision. \"\"\" BASE_REVISION_NAME = 'base' REVISION_EXT = 'rev'", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "else: # Update the existing file. current = operating_system.read_file( revision_file,", "override_strategy self._override_strategy.configure( base_config_path, owner, group, codec, requires_root) def get_value(self, key,", "self._group = group self._codec = codec self._requires_root = requires_root def", "the most current version of configuration. 
:returns: Updates to the", "a sorted list of paths to existing revision files. The", "else: self._override_strategy.apply(group_name, change_id, options) self.refresh_cache() def remove_system_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a", "ImportOverrideStrategy(revision_dir, self.REVISION_EXT) def configure(self, base_config_path, owner, group, codec, requires_root): \"\"\"", "entire group. removed = self._collect_revision_files(group_name) for path in removed: operating_system.remove(path,", "sorted list of paths to existing revision files. The files", "of the configuration and revision files. :type owner string :param", "of a given configuration override :param group_name The group the", "base_config_path string :param owner Owner of the configuration files. :type", "first 'apply()'. operating_system.remove(self._base_revision_file, force=True, as_root=self._requires_root) def get(self, group_name, change_id): return", "for the specific language governing permissions and limitations # under", "on the first 'apply()'. 
operating_system.remove(self._base_revision_file, force=True, as_root=self._requires_root) def get(self, group_name,", "_regenerate_base_configuration(self): \"\"\"Gather all configuration changes and apply them in order", "requires_root self._base_revision_file = guestagent_utils.build_file_path( self._revision_dir, self.BASE_REVISION_NAME, self.REVISION_EXT) self._import_strategy.configure( base_config_path, owner,", "options, codec=self._codec, as_root=self._requires_root) operating_system.chown( revision_file, self._owner, self._group, as_root=self._requires_root) operating_system.chmod( revision_file,", "return sorted(operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root)) def _find_revision_file(self, group_name, change_id):", "change_id, options) self._regenerate_base_configuration() def remove(self, group_name, change_id=None): if self._import_strategy.has_revisions: self._import_strategy.remove(group_name,", "by applying all changes on a saved-off base revision. \"\"\"", "group self._codec = codec self._requires_root = requires_root def exists(self, group_name,", "_remove_override(self, group_name, change_id): self._override_strategy.remove(group_name, change_id) self.refresh_cache() def refresh_cache(self): self._value_cache =", "set() if change_id: # Remove a given file. revision_file =", "self._import_strategy.apply(group_name, change_id, options) self._regenerate_base_configuration() def remove(self, group_name, change_id=None): if self._import_strategy.has_revisions:", "save_configuration(self, options): \"\"\"Write given contents to the base configuration file.", "Configuration changes. 
:type options dict \"\"\" @abc.abstractmethod def remove(self, group_name,", "base_config_path self._owner = owner self._group = group self._codec = codec", "= operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root) return next(iter(found), None) def", "None. :param group_name The group the override belongs to. :type", "import operating_system from trove.guestagent.common.operating_system import FileMode class ConfigurationManager(object): \"\"\" ConfigurationManager", "needs to be configured before it can be used. It", "re import six from trove.guestagent.common import guestagent_utils from trove.guestagent.common import", "path in removed: operating_system.remove(path, force=True, as_root=self._requires_root) def get(self, group_name, change_id):", "to keep track of the order in which overrides within", "the base revision as a single dict. Return an empty", "self._codec = codec self._requires_root = requires_root self._value_cache = None if", "= ( self.SYSTEM_PRE_USER_GROUP if pre_user else self.SYSTEM_POST_USER_GROUP) self._apply_override(group_name, change_id, options)", "dict if not already. self._apply_override( group_name, change_id, self._codec.deserialize(options)) else: self._override_strategy.apply(group_name,", "strategy should be compatible with very much any datastore. It", "+ 1, change_id), self._revision_ext) else: # Update the existing file.", "force=True, as_root=self._requires_root) def get(self, group_name, change_id): revision_file = self._find_revision_file(group_name, change_id)", "datastores that do not support multiple configuration files. It uses", "via the Trove API. 
- System overrides - 'internal' configuration", "operating_system.read_file( revision_file, codec=self._codec, as_root=self._requires_root) options = guestagent_utils.update_dict(options, current) operating_system.write_file( revision_file,", ":type owner string :param group Group of the configuration and", "configuration file. :type base_config_path string :param owner Owner of the", "*args, **kwargs): \"\"\"Configure this strategy. A strategy needs to be", "requires_root Whether the strategy requires superuser privileges. :type requires_root boolean", "self._import_strategy.configure( base_config_path, owner, group, codec, requires_root) def exists(self, group_name, change_id):", "# Update the existing file. current = operating_system.read_file( revision_file, codec=self._codec,", "a new file. last_revision_index = self._get_last_file_index(group_name) revision_file = guestagent_utils.build_file_path( self._revision_dir,", "'system' configuration change. \"\"\" self._remove_override(self.SYSTEM_POST_USER_GROUP, change_id) self._remove_override(self.SYSTEM_PRE_USER_GROUP, change_id) def remove_user_override(self,", "StreamCodec :param requires_root Whether the strategy requires superuser privileges. :type", "revision as a Python dict. \"\"\" return {} class ImportOverrideStrategy(ConfigurationOverrideStrategy):", "os.path.dirname(base_config_path), self.DEFAULT_STRATEGY_OVERRIDES_SUB_DIR) self._override_strategy = OneFileOverrideStrategy(revision_dir) else: self._override_strategy = override_strategy self._override_strategy.configure(", "self._override_strategy.apply(group_name, change_id, options) self.refresh_cache() def remove_system_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'system'", "ConfigurationOverrideStrategy(object): \"\"\"ConfigurationOverrideStrategy handles configuration files. 
The strategy provides functionality to", ":type codec StreamCodec :param requires_root Whether the strategy requires superuser", "the location of the configuration file. revision_dir = guestagent_utils.build_file_path( os.path.dirname(base_config_path),", "self.refresh_cache() return self._value_cache.get(key, default) def parse_configuration(self): \"\"\"Read contents of the", "not exist. operating_system.copy( self._base_config_path, self._base_revision_file, force=True, preserve=True, as_root=self._requires_root) base_revision =", "def _initialize_import_directory(self): \"\"\"Lazy-initialize the directory for imported revision files. \"\"\"", ":type requires_root boolean \"\"\" self._base_config_path = base_config_path self._owner = owner", "API. - System overrides - 'internal' configuration changes applied by", "is None: # Create a new file. last_revision_index = self._get_last_file_index(group_name)", "Remove the whole group if 'change_id' is None. :param group_name", "@property def has_revisions(self): \"\"\"Return True if there currently are any", "no # overrides. It will be regenerated based on the", "self._remove_override(self.USER_GROUP, change_id) def _remove_override(self, group_name, change_id): self._override_strategy.remove(group_name, change_id) self.refresh_cache() def", "self._get_last_file_index(group_name) revision_file = guestagent_utils.build_file_path( self._revision_dir, '%s-%03d-%s' % (group_name, last_revision_index +", "the base revision. Write the results to the configuration file.", "You may obtain # a copy of the License at", "manage configuration overrides (e.g. ImportOverrideStrategy). Defaults to OneFileOverrideStrategy if None.", "= self._collect_revision_files(group_name) if current_files: name_pattern = self._build_rev_name_pattern(group_name=group_name) last_file_name = os.path.basename(current_files[-1])", "is to used to order user/system sets, 'n' is an", "not already. 
self._apply_override( group_name, change_id, self._codec.deserialize(options)) else: self._override_strategy.apply(group_name, change_id, options)", "self._initialize_import_directory() revision_file = self._find_revision_file(group_name, change_id) if revision_file is None: #", "self._find_revision_file(group_name, change_id) is not None def apply(self, group_name, change_id, options):", "base_config_path, owner, group, codec, requires_root) def get_value(self, key, default=None): \"\"\"Return", "None def apply(self, group_name, change_id, options): self._initialize_import_directory() revision_file = self._find_revision_file(group_name,", "and (len(self._collect_revision_files()) > 0)) def _get_last_file_index(self, group_name): \"\"\"Get the index", "with the current configuration contents if it # does not", "codec, requires_root) def exists(self, group_name, change_id): return self._import_strategy.exists(group_name, change_id) def", "the # user group, unless specified otherwise (i.e. SYSTEM_POST_USER_GROUP #", "always applied after all user changes so that they override", "change_id), self._revision_ext) else: # Update the existing file. current =", "def exists(self, group_name, change_id): return self._import_strategy.exists(group_name, change_id) def apply(self, group_name,", "the most current configuration revision. Update if a file with", "base revision. Write the results to the configuration file. \"\"\"", "in self._collect_revision_files(): options = operating_system.read_file(path, codec=self._codec, as_root=self._requires_root) guestagent_utils.update_dict(options, parsed_options) return", "import guestagent_utils from trove.guestagent.common import operating_system from trove.guestagent.common.operating_system import FileMode", "applied or removed a new configuration file is generated by", "base_config_path, owner, group, codec, requires_root=False, override_strategy=None): \"\"\" :param base_config_path Path", "# Remove the entire group. 
removed = self._collect_revision_files(group_name) for path", "self._override_strategy.configure( base_config_path, owner, group, codec, requires_root) def get_value(self, key, default=None):", "guestagent_utils.build_file_path( self._revision_dir, '%s-%03d-%s' % (group_name, last_revision_index + 1, change_id), self._revision_ext)", "return {} class ImportOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"Import strategy keeps overrides in separate", "This strategy should be compatible with very much any datastore.", "files. :type owner string :param group Group of the configuration", "to order user/system sets, 'n' is an index used to", "Tesora Inc. # All Rights Reserved. # # Licensed under", "given contents to the base configuration file. Remove all existing", "the guestagent. The name format of override files is: '<set", "def _apply_override(self, group_name, change_id, options): if not isinstance(options, dict): #", "group_name, change_id=None): \"\"\"Rollback a given configuration override. Remove the whole", "'base' REVISION_EXT = 'rev' def __init__(self, revision_dir): \"\"\" :param revision_dir", "software # distributed under the License is distributed on an", "(the \"License\"); you may # not use this file except", "the overrides internally. When an override is applied or removed", "change_id, options): if not isinstance(options, dict): # Deserialize the options", "setting. :param options Configuration changes. :type options string or dict", "\"\"\"Apply given options on the most current configuration revision. Update", "for management of datastore configuration. Its base functionality includes reading", ":param group Group of the configuration and revision files. 
:type", "self._group = group self._codec = codec self._requires_root = requires_root self._base_revision_file", "= override_strategy self._override_strategy.configure( base_config_path, owner, group, codec, requires_root) def get_value(self,", "change_id string :param options Configuration changes. :type options dict \"\"\"", "configuration. :returns: Updates to the base revision as a Python", "group Group of the configuration files. :type group string :param", "def parse_configuration(self): \"\"\"Read contents of the configuration file (applying overrides", "the manager requires superuser privileges. :type requires_root boolean :param override_strategy", "change_id=DEFAULT_CHANGE_ID, pre_user=False): \"\"\"Apply a 'system' change to the configuration. System", "self.SYSTEM_PRE_USER_GROUP if pre_user else self.SYSTEM_POST_USER_GROUP) self._apply_override(group_name, change_id, options) def apply_user_override(self,", "under the License. import abc import os import re import", "a dict of options for writing. self.save_configuration(self._codec.serialize(options)) else: self._override_strategy.remove(self.USER_GROUP) self._override_strategy.remove(self.SYSTEM_PRE_USER_GROUP)", ":param override_strategy Strategy used to manage configuration overrides (e.g. ImportOverrideStrategy).", "group_name = ( self.SYSTEM_PRE_USER_GROUP if pre_user else self.SYSTEM_POST_USER_GROUP) self._apply_override(group_name, change_id,", "base_config_path, owner, group, codec, requires_root) def exists(self, group_name, change_id): return", "file. Remove all existing overrides (both system and user). :param", "inputs and requests. 
When supplied an override strategy it allows", "__init__(self, revision_dir): \"\"\" :param revision_dir Path to the directory for", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "from trove.guestagent.common import guestagent_utils from trove.guestagent.common import operating_system from trove.guestagent.common.operating_system", "'%s-%03d-%s' % (group_name, last_revision_index + 1, change_id), self._revision_ext) else: #", "current) operating_system.write_file( revision_file, options, codec=self._codec, as_root=self._requires_root) operating_system.chown( revision_file, self._owner, self._group,", "files. \"\"\" if not os.path.exists(self._revision_dir): operating_system.create_directory( self._revision_dir, user=self._owner, group=self._group, force=True,", "changes applied by the guestagent. The name format of override", "base revision. \"\"\" BASE_REVISION_NAME = 'base' REVISION_EXT = 'rev' def", "Write the results to the configuration file. \"\"\" if not", "a strategy for datastores that do not support multiple configuration", "all updates applied to the base revision as a single", "given key or 'default'. \"\"\" if self._value_cache is None: self.refresh_cache()", "from trove.guestagent.common.operating_system import FileMode class ConfigurationManager(object): \"\"\" ConfigurationManager is responsible", "# under the License. import abc import os import re", "Whether the strategy requires superuser privileges. :type requires_root boolean \"\"\"", "to OneFileOverrideStrategy if None. This strategy should be compatible with", "'10-system' USER_GROUP = '20-user' SYSTEM_POST_USER_GROUP = '50-system' DEFAULT_STRATEGY_OVERRIDES_SUB_DIR = 'overrides'", "last_file_name = os.path.basename(current_files[-1]) last_index_match = re.match(name_pattern, last_file_name) if last_index_match: return", "files. :type revision_dir string \"\"\" self._revision_dir = revision_dir self._import_strategy =", "if None. 
This strategy should be compatible with very much", "after all user changes so that they override any user-defined", "upgrade compatibility issues in case the default implementation changes in", "defines its strategy explicitly to avoid upgrade compatibility issues in", "revision_dir, revision_ext): \"\"\" :param revision_dir Path to the directory for", "self._regenerate_base_configuration() def remove(self, group_name, change_id=None): if self._import_strategy.has_revisions: self._import_strategy.remove(group_name, change_id=change_id) self._regenerate_base_configuration()", "default. Store the revisions in a # sub-directory at the", ":param base_config_path Path to the configuration file. :type base_config_path string", "change_id): return self._find_revision_file(group_name, change_id) is not None def apply(self, group_name,", "contents of the configuration file (applying overrides if any) and", "strategy it allows the user to manage configuration overrides as", "= self._find_revision_file(group_name, change_id) if revision_file is None: # Create a", "override is removed. We keep two sets of override files", "given configuration override :param group_name The group the override belongs", "a 'system' configuration change. \"\"\" self._remove_override(self.SYSTEM_POST_USER_GROUP, change_id) self._remove_override(self.SYSTEM_PRE_USER_GROUP, change_id) def", "strategy needs to be configured before it can be used.", "Trove API. - System overrides - 'internal' configuration changes applied", "\"\"\" name_pattern = self._build_rev_name_pattern(group_name=group_name) return sorted(operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root))", "files. 
The strategy provides functionality to enumerate, apply and remove", "The base revision file is no longer needed if there", "as_root=self._requires_root) operating_system.chmod( self._base_config_path, FileMode.ADD_READ_ALL, as_root=self._requires_root) self.refresh_cache() def has_system_override(self, change_id): \"\"\"Return", "changes. :type options dict \"\"\" @abc.abstractmethod def remove(self, group_name, change_id=None):", "apply and remove configuration overrides. \"\"\" @abc.abstractmethod def configure(self, *args,", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "operating_system.copy( self._base_config_path, self._base_revision_file, force=True, preserve=True, as_root=self._requires_root) base_revision = operating_system.read_file( self._base_revision_file,", ":param options Configuration changes. :type options string or dict \"\"\"", "apply(self, group_name, change_id, options): self._initialize_import_directory() revision_file = self._find_revision_file(group_name, change_id) if", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "apply(self, group_name, change_id, options): self._import_strategy.apply(group_name, change_id, options) self._regenerate_base_configuration() def remove(self,", "In general system overrides will get applied over the #", "keep track of the order in which overrides within their", "exists. \"\"\" return (self._override_strategy.exists(self.SYSTEM_POST_USER_GROUP, change_id) or self._override_strategy.exists(self.SYSTEM_PRE_USER_GROUP, change_id)) def apply_system_override(self,", "\"\"\" @abc.abstractmethod def apply(self, group_name, change_id, options): \"\"\"Apply given options", "for import files. :type revision_dir string \"\"\" self._revision_dir = revision_dir", "name_pattern = self._build_rev_name_pattern(group_name, change_id) found = operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern,", "most current version of configuration. 
:returns: Updates to the base", "ConfigurationManager. \"\"\" @abc.abstractmethod def exists(self, group_name, change_id): \"\"\"Return whether a", "parse_updates(self): parsed_options = {} for path in self._collect_revision_files(): options =", ":type options string or dict \"\"\" group_name = ( self.SYSTEM_PRE_USER_GROUP", "of the configuration file. revision_dir = guestagent_utils.build_file_path( os.path.dirname(base_config_path), self.DEFAULT_STRATEGY_OVERRIDES_SUB_DIR) self._override_strategy", "KIND, either express or implied. See the # License for", "Strategy to keep the overrides internally. When an override is", "any user-defined setting. :param options Configuration changes. :type options string", "well. \"\"\" # Configuration group names. The names determine the", "files in a separate directory. - User overrides - configuration", "@abc.abstractmethod def get(self, group_name, change_id=None): \"\"\"Return the contents of a", "privileges. :type requires_root boolean :param override_strategy Strategy used to manage", "the directory for imported revision files. \"\"\" if not os.path.exists(self._revision_dir):", "in the same order in which they were applied. \"\"\"", "configuration file on the first 'apply()'. operating_system.remove(self._base_revision_file, force=True, as_root=self._requires_root) def", "if change_id: # Remove a given file. revision_file = self._find_revision_file(group_name,", "# Serialize a dict of options for writing. self.save_configuration(self._codec.serialize(options)) else:", "self.REVISION_EXT) def configure(self, base_config_path, owner, group, codec, requires_root): \"\"\" :param", "be used. It would typically be configured by the ConfigurationManager.", "Copyright 2015 Tesora Inc. # All Rights Reserved. 
# #", "self._revision_dir, self.BASE_REVISION_NAME, self.REVISION_EXT) self._import_strategy.configure( base_config_path, owner, group, codec, requires_root) def", "owner string :param group Group of the configuration and revision", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "a separate directory. - User overrides - configuration overrides applied", "given 'system' change exists. \"\"\" return (self._override_strategy.exists(self.SYSTEM_POST_USER_GROUP, change_id) or self._override_strategy.exists(self.SYSTEM_PRE_USER_GROUP,", "= owner self._group = group self._codec = codec self._requires_root =", "return self.FILE_NAME_PATTERN % (group_name, change_id, self._revision_ext) class OneFileOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"This is", "files. It uses the Import Strategy to keep the overrides", "group_name, change_id=None): \"\"\"Return the contents of a given configuration override", "file. revision_dir = guestagent_utils.build_file_path( os.path.dirname(base_config_path), self.DEFAULT_STRATEGY_OVERRIDES_SUB_DIR) self._override_strategy = OneFileOverrideStrategy(revision_dir) else:", "self._remove_override(self.SYSTEM_POST_USER_GROUP, change_id) self._remove_override(self.SYSTEM_PRE_USER_GROUP, change_id) def remove_user_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'user'", "self._find_revision_file(group_name, change_id) if revision_file: removed.add(revision_file) else: # Remove the entire", "import files. :type revision_dir string :param revision_ext Extension of revision", "force=True, as_root=self._requires_root) def get(self, group_name, change_id): return self._import_strategy.get(group_name, change_id) def", "applied over the # user group, unless specified otherwise (i.e.", ":type change_id string :param options Configuration changes. :type options dict", "return base_options def save_configuration(self, options): \"\"\"Write given contents to the", "'user' configuration change. 
\"\"\" self._remove_override(self.USER_GROUP, change_id) def _remove_override(self, group_name, change_id):", "override_strategy Strategy used to manage configuration overrides (e.g. ImportOverrideStrategy). Defaults", "self._revision_dir, is_directory=True, as_root=self._requires_root) and (len(self._collect_revision_files()) > 0)) def _get_last_file_index(self, group_name):", "The strategy provides functionality to enumerate, apply and remove configuration", "very much any datastore. It is recommended each datastore defines", "A strategy needs to be configured before it can be", "implied. See the # License for the specific language governing", "file is simply deleted when the override is removed. We", "sorted(operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root)) def _find_revision_file(self, group_name, change_id): name_pattern", "guestagent. The name format of override files is: '<set prefix>-<n>-<group", "already. self._apply_override( group_name, change_id, self._codec.deserialize(options)) else: self._override_strategy.apply(group_name, change_id, options) self.refresh_cache()", "@abc.abstractmethod def configure(self, *args, **kwargs): \"\"\"Configure this strategy. A strategy", "revision_dir self._revision_ext = revision_ext def configure(self, base_config_path, owner, group, codec,", "new file. last_revision_index = self._get_last_file_index(group_name) revision_file = guestagent_utils.build_file_path( self._revision_dir, '%s-%03d-%s'", "The group the override belongs to. :type group_name string :param", "user inputs and requests. 
When supplied an override strategy it", "self._requires_root = requires_root self._value_cache = None if not override_strategy: #", "operating_system from trove.guestagent.common.operating_system import FileMode class ConfigurationManager(object): \"\"\" ConfigurationManager is", "else self.SYSTEM_POST_USER_GROUP) self._apply_override(group_name, change_id, options) def apply_user_override(self, options, change_id=DEFAULT_CHANGE_ID): \"\"\"Apply", "= set() if change_id: # Remove a given file. revision_file", "options): \"\"\"Write given contents to the base configuration file. Remove", "Remove all existing overrides (both system and user). :param contents", "remove(self, group_name, change_id=None): \"\"\"Rollback a given configuration override. Remove the", "__init__(self, revision_dir, revision_ext): \"\"\" :param revision_dir Path to the directory", "contents if it # does not exist. operating_system.copy( self._base_config_path, self._base_revision_file,", "all changes on a saved-off base revision. \"\"\" BASE_REVISION_NAME =", "strategy keeps overrides in separate files that get imported into", "self._revision_dir, '%s-%03d-%s' % (group_name, last_revision_index + 1, change_id), self._revision_ext) else:", "given options on the most current configuration revision. Update if", "the whole group if 'change_id' is None. :param group_name The", "if not isinstance(options, dict): # Deserialize the options into a", "StreamCodec :param requires_root Whether the manager requires superuser privileges. 
:type", "= self._build_rev_name_pattern(group_name=group_name) return sorted(operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root)) def _find_revision_file(self,", "int(last_index_match.group(1)) return 0 def _collect_revision_files(self, group_name='.+'): \"\"\"Collect and return a", "exists(self, group_name, change_id): return self._find_revision_file(group_name, change_id) is not None def", "self._find_revision_file(group_name, change_id) if revision_file is None: # Create a new", "recursive=True, pattern=name_pattern, as_root=self._requires_root)) def _find_revision_file(self, group_name, change_id): name_pattern = self._build_rev_name_pattern(group_name,", "in which they were applied. \"\"\" name_pattern = self._build_rev_name_pattern(group_name=group_name) return", "= 'base' REVISION_EXT = 'rev' def __init__(self, revision_dir): \"\"\" :param", "if last_index_match: return int(last_index_match.group(1)) return 0 def _collect_revision_files(self, group_name='.+'): \"\"\"Collect", "# overrides. It will be regenerated based on the current", "\"\"\" self._remove_override(self.USER_GROUP, change_id) def _remove_override(self, group_name, change_id): self._override_strategy.remove(group_name, change_id) self.refresh_cache()", "get applied. System groups are divided into two camps; pre-user", "base configuration file which never changes itself. An override file", "def __init__(self, base_config_path, owner, group, codec, requires_root=False, override_strategy=None): \"\"\" :param", "of the override within the group. :type change_id string \"\"\"", "get imported into the base configuration file which never changes", "= self._collect_revision_files(group_name) for path in removed: operating_system.remove(path, force=True, as_root=self._requires_root) def", "Group of the configuration files. 
:type group string :param codec", "(group_name, change_id, self._revision_ext) class OneFileOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"This is a strategy for", "parse_updates(self): \"\"\"Return all updates applied to the base revision as", "obtain # a copy of the License at # #", "def _find_revision_file(self, group_name, change_id): name_pattern = self._build_rev_name_pattern(group_name, change_id) found =", "deleted when the override is removed. We keep two sets", "override strategy it allows the user to manage configuration overrides", "recursive=True, pattern=name_pattern, as_root=self._requires_root) return next(iter(found), None) def _build_rev_name_pattern(self, group_name='.+', change_id='.+'):", "owner Owner of the configuration files. :type owner string :param", "configured before it can be used. It would typically be", "revision_file = self._find_revision_file(group_name, change_id) if revision_file is None: # Create", "to the base revision as a Python dict. \"\"\" return", "\"\"\" return {} class ImportOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"Import strategy keeps overrides in", "self._regenerate_base_configuration() if not self._import_strategy.has_revisions: # The base revision file is", "\"\"\" group_name = ( self.SYSTEM_PRE_USER_GROUP if pre_user else self.SYSTEM_POST_USER_GROUP) self._apply_override(group_name,", "= guestagent_utils.build_file_path( self._revision_dir, self.BASE_REVISION_NAME, self.REVISION_EXT) self._import_strategy.configure( base_config_path, owner, group, codec,", "keep the overrides internally. 
When an override is applied or", "as_root=self._requires_root) return next(iter(found), None) def _build_rev_name_pattern(self, group_name='.+', change_id='.+'): return self.FILE_NAME_PATTERN", "= self._find_revision_file(group_name, change_id) if revision_file: removed.add(revision_file) else: # Remove the", "dict if the base file is always the most current", "requires_root) def exists(self, group_name, change_id): return self._import_strategy.exists(group_name, change_id) def apply(self,", "ConfigurationManager is responsible for management of datastore configuration. Its base", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "change_id, self._revision_ext) class OneFileOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"This is a strategy for datastores", "most current configuration revision. Update if a file with the", "operating_system.chown( self._base_config_path, self._owner, self._group, as_root=self._requires_root) operating_system.chmod( self._base_config_path, FileMode.ADD_READ_ALL, as_root=self._requires_root) self.refresh_cache()", "configuration format. :type codec StreamCodec :param requires_root Whether the manager", "configuration file. \"\"\" if not os.path.exists(self._base_revision_file): # Initialize the file", "change_id) is not None def apply(self, group_name, change_id, options): self._initialize_import_directory()", "def remove_system_override(self, change_id=DEFAULT_CHANGE_ID): \"\"\"Revert a 'system' configuration change. \"\"\" self._remove_override(self.SYSTEM_POST_USER_GROUP,", "the user overrides\"\"\" return self._override_strategy.get(self.USER_GROUP, change_id) def _apply_override(self, group_name, change_id,", "change_id=None): \"\"\"Rollback a given configuration override. Remove the whole group", "directory. - User overrides - configuration overrides applied by the", "Remove a given file. 
revision_file = self._find_revision_file(group_name, change_id) if revision_file:", "def apply(self, group_name, change_id, options): self._import_strategy.apply(group_name, change_id, options) self._regenerate_base_configuration() def", "given configuration override. Remove the whole group if 'change_id' is", "Remove the entire group. removed = self._collect_revision_files(group_name) for path in", "group_name): \"\"\"Get the index of the most current file in", "a Python dict. \"\"\" base_options = operating_system.read_file( self._base_config_path, codec=self._codec, as_root=self._requires_root)", "the base configuration file. Remove all existing overrides (both system", "as_root=self._requires_root)) def _find_revision_file(self, group_name, change_id): name_pattern = self._build_rev_name_pattern(group_name, change_id) found", "default) def parse_configuration(self): \"\"\"Read contents of the configuration file (applying", "be configured by the ConfigurationManager. \"\"\" @abc.abstractmethod def exists(self, group_name,", "import six from trove.guestagent.common import guestagent_utils from trove.guestagent.common import operating_system", "configuration file. Remove all existing overrides (both system and user).", "to the configuration. The 'system' values will be re-applied over", "configuration and revision files. :type owner string :param group Group", "to keep the overrides internally. When an override is applied", "\"\"\"Apply a 'user' change to the configuration. 
The 'system' values", "BASE_REVISION_NAME = 'base' REVISION_EXT = 'rev' def __init__(self, revision_dir): \"\"\"", "pre_user else self.SYSTEM_POST_USER_GROUP) self._apply_override(group_name, change_id, options) def apply_user_override(self, options, change_id=DEFAULT_CHANGE_ID):", "change_id=change_id) self._regenerate_base_configuration() if not self._import_strategy.has_revisions: # The base revision file", "in separate files that get imported into the base configuration", "change_id, options): \"\"\"Apply given options on the most current configuration", "implementation changes in the future. :type override_strategy ConfigurationOverrideStrategy \"\"\" self._base_config_path", "2.0 (the \"License\"); you may # not use this file", "\"\"\"Import strategy keeps overrides in separate files that get imported", "base revision as a single dict. Return an empty dict", "if a file with the same id already exists. :param", "as a single dict. Return an empty dict if the", "of revision files. :type revision_ext string \"\"\" self._revision_dir = revision_dir", "self._build_rev_name_pattern(group_name=group_name) last_file_name = os.path.basename(current_files[-1]) last_index_match = re.match(name_pattern, last_file_name) if last_index_match:", "user/system sets, 'n' is an index used to keep track", "if pre_user else self.SYSTEM_POST_USER_GROUP) self._apply_override(group_name, change_id, options) def apply_user_override(self, options,", "by applicable law or agreed to in writing, software #", "configure(self, base_config_path, owner, group, codec, requires_root): \"\"\" :param base_config_path Path", "the current configuration contents if it # does not exist.", "for imported revision files. \"\"\" if not os.path.exists(self._revision_dir): operating_system.create_directory( self._revision_dir,", "datastore. 
It is recommended each datastore defines its strategy explicitly", "self._value_cache.get(key, default) def parse_configuration(self): \"\"\"Read contents of the configuration file", "\"\"\" BASE_REVISION_NAME = 'base' REVISION_EXT = 'rev' def __init__(self, revision_dir):", "force=True, preserve=True, as_root=self._requires_root) base_revision = operating_system.read_file( self._base_revision_file, codec=self._codec, as_root=self._requires_root) changes", "change. \"\"\" self._remove_override(self.USER_GROUP, change_id) def _remove_override(self, group_name, change_id): self._override_strategy.remove(group_name, change_id)", "path in self._collect_revision_files(): options = operating_system.read_file(path, codec=self._codec, as_root=self._requires_root) guestagent_utils.update_dict(options, parsed_options)", "default implementation changes in the future. :type override_strategy ConfigurationOverrideStrategy \"\"\"", "of override files in a separate directory. - User overrides", "group_name, change_id): name_pattern = self._build_rev_name_pattern(group_name, change_id) found = operating_system.list_files_in_directory( self._revision_dir,", "do not support multiple configuration files. It uses the Import", "self._override_strategy.remove(self.USER_GROUP) self._override_strategy.remove(self.SYSTEM_PRE_USER_GROUP) self._override_strategy.remove(self.SYSTEM_POST_USER_GROUP) operating_system.write_file( self._base_config_path, options, as_root=self._requires_root) operating_system.chown( self._base_config_path, self._owner,", "OneFileOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"This is a strategy for datastores that do not", "Strategy used to manage configuration overrides (e.g. ImportOverrideStrategy). 
Defaults to", "revision_ext def configure(self, base_config_path, owner, group, codec, requires_root): \"\"\" :param", "group, codec, requires_root) def exists(self, group_name, change_id): return self._import_strategy.exists(group_name, change_id)", "files is: '<set prefix>-<n>-<group name>.<ext>' where 'set prefix' is to", "# Use OneFile strategy by default. Store the revisions in", "# All Rights Reserved. # # Licensed under the Apache", "string :param options Configuration changes. :type options dict \"\"\" @abc.abstractmethod", "self._revision_dir, user=self._owner, group=self._group, force=True, as_root=self._requires_root) def remove(self, group_name, change_id=None): removed", "codec StreamCodec :param requires_root Whether the strategy requires superuser privileges.", "requires_root self._value_cache = None if not override_strategy: # Use OneFile", "applicable law or agreed to in writing, software # distributed", "as_root=self._requires_root) def get(self, group_name, change_id): return self._import_strategy.get(group_name, change_id) def _regenerate_base_configuration(self):", "change_id) found = operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root) return next(iter(found),", "the default implementation changes in the future. :type override_strategy ConfigurationOverrideStrategy", "issues in case the default implementation changes in the future.", "change to the configuration. The 'system' values will be re-applied", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "operating_system.chmod( revision_file, FileMode.ADD_READ_ALL, as_root=self._requires_root) def _initialize_import_directory(self): \"\"\"Lazy-initialize the directory for", "existing revision files. The files should be sorted in the", "the ConfigurationManager. \"\"\" @abc.abstractmethod def exists(self, group_name, change_id): \"\"\"Return whether", "configuration overrides as well. 
\"\"\" # Configuration group names. The", "file. :type contents string or dict \"\"\" if isinstance(options, dict):", "string :param owner Owner of the configuration and revision files.", "requires superuser privileges. :type requires_root boolean \"\"\" self._base_config_path = base_config_path", "that they override any user-defined setting. :param options Configuration changes.", "override_strategy ConfigurationOverrideStrategy \"\"\" self._base_config_path = base_config_path self._owner = owner self._group", "string :param codec Codec for reading/writing of the particular configuration", "{} class ImportOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"Import strategy keeps overrides in separate files", "value at a given key or 'default'. \"\"\" if self._value_cache", "else: self._override_strategy = override_strategy self._override_strategy.configure( base_config_path, owner, group, codec, requires_root)", "_find_revision_file(self, group_name, change_id): name_pattern = self._build_rev_name_pattern(group_name, change_id) found = operating_system.list_files_in_directory(", "options Configuration changes. :type options string or dict \"\"\" group_name", "The name format of override files is: '<set prefix>-<n>-<group name>.<ext>'", "def __init__(self, revision_dir, revision_ext): \"\"\" :param revision_dir Path to the", "is None. :param group_name The group the override belongs to.", "is removed. We keep two sets of override files in", "manager requires superuser privileges. :type requires_root boolean :param override_strategy Strategy", "# License for the specific language governing permissions and limitations", "has_system_override(self, change_id): \"\"\"Return whether a given 'system' change exists. \"\"\"", "or self._override_strategy.exists(self.SYSTEM_PRE_USER_GROUP, change_id)) def apply_system_override(self, options, change_id=DEFAULT_CHANGE_ID, pre_user=False): \"\"\"Apply a", "functionality includes reading and writing configuration files. 
It is responsible", "group_name, change_id, options): if not isinstance(options, dict): # Deserialize the", "of datastore configuration. Its base functionality includes reading and writing", "REVISION_EXT = 'rev' def __init__(self, revision_dir): \"\"\" :param revision_dir Path", "\"\"\" if self._value_cache is None: self.refresh_cache() return self._value_cache.get(key, default) def", "supplied an override strategy it allows the user to manage", "def remove(self, group_name, change_id=None): removed = set() if change_id: #", "removed a new configuration file is generated by applying all", "base_options def save_configuration(self, options): \"\"\"Write given contents to the base", "configuration override. Remove the whole group if 'change_id' is None.", "key or 'default'. \"\"\" if self._value_cache is None: self.refresh_cache() return", "the order in which the # groups get applied. System", "options, change_id=DEFAULT_CHANGE_ID, pre_user=False): \"\"\"Apply a 'system' change to the configuration.", "get(self, group_name, change_id): return self._import_strategy.get(group_name, change_id) def _regenerate_base_configuration(self): \"\"\"Gather all", "import re import six from trove.guestagent.common import guestagent_utils from trove.guestagent.common", "\"\"\" current_files = self._collect_revision_files(group_name) if current_files: name_pattern = self._build_rev_name_pattern(group_name=group_name) last_file_name", "% (group_name, change_id, self._revision_ext) class OneFileOverrideStrategy(ConfigurationOverrideStrategy): \"\"\"This is a strategy", "an override is applied or removed a new configuration file", "string :param group Group of the configuration and revision files.", "self._collect_revision_files(): options = operating_system.read_file(path, codec=self._codec, as_root=self._requires_root) guestagent_utils.update_dict(options, parsed_options) return parsed_options", "def get(self, group_name, change_id=None): \"\"\"Return the contents of a given", "is 
recommended each datastore defines its strategy explicitly to avoid", "current configuration contents if it # does not exist. operating_system.copy(", "as_root=self._requires_root) guestagent_utils.update_dict(options, parsed_options) return parsed_options @property def has_revisions(self): \"\"\"Return True", "in which overrides within their set got applied. \"\"\" FILE_NAME_PATTERN", "language governing permissions and limitations # under the License. import", "the same order in which they were applied. \"\"\" name_pattern", "License. You may obtain # a copy of the License", "applied. System groups are divided into two camps; pre-user #", "ANY KIND, either express or implied. See the # License", "before it can be used. It would typically be configured", "owner, group, codec, requires_root) def get_value(self, key, default=None): \"\"\"Return the", "sorted in the same order in which they were applied.", "group_name, change_id, options): self._initialize_import_directory() revision_file = self._find_revision_file(group_name, change_id) if revision_file", "trove.guestagent.common import operating_system from trove.guestagent.common.operating_system import FileMode class ConfigurationManager(object): \"\"\"", "overrides. \"\"\" @abc.abstractmethod def configure(self, *args, **kwargs): \"\"\"Configure this strategy.", "index used to keep track of the order in which", "by default. Store the revisions in a # sub-directory at", "override files is: '<set prefix>-<n>-<group name>.<ext>' where 'set prefix' is", "governing permissions and limitations # under the License. 
import abc", "overrides\"\"\" return self._override_strategy.get(self.USER_GROUP, change_id) def _apply_override(self, group_name, change_id, options): if", "name_pattern = self._build_rev_name_pattern(group_name=group_name) return sorted(operating_system.list_files_in_directory( self._revision_dir, recursive=True, pattern=name_pattern, as_root=self._requires_root)) def", "preserve=True, as_root=self._requires_root) base_revision = operating_system.read_file( self._base_revision_file, codec=self._codec, as_root=self._requires_root) changes =", "It would typically be configured by the ConfigurationManager. \"\"\" @abc.abstractmethod", "if self._value_cache is None: self.refresh_cache() return self._value_cache.get(key, default) def parse_configuration(self):", "codec=self._codec, as_root=self._requires_root) updates = self._override_strategy.parse_updates() guestagent_utils.update_dict(updates, base_options) return base_options def", "= guestagent_utils.build_file_path( self._revision_dir, '%s-%03d-%s' % (group_name, last_revision_index + 1, change_id),", "overrides if any) and parse it into a dict. :returns:", "or implied. See the # License for the specific language" ]
[ "Platform\": \"QuantConnect-Platform-2.0.0.yaml\", \"Alpha Streams\": \"QuantConnect-Alpha-0.8.yaml\"} def RequestTable(api_call, params): writeUp =", "{request_object[\"description\"]}</th>\\n' else: writeUp += f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model</th>\\n' writeUp += '</tr>\\n</thead>\\n'", "Request.html', \"w\") as html_file: description_ = \"\" if \"parameters\" in", "\"requestBody\" in content: if \"description\" in content[\"requestBody\"]: description_ = str(content[\"requestBody\"][\"description\"])", "in request_object_: request_object_properties_ = request_object_[\"properties\"] description_ = request_object_[\"description\"] if \"description\"", "f'\\n<tr>\\n<td width=\"20%\">{item[\"name\"]}</td> <td> <code>{type_}</code><br/>{description_}</td>\\n</tr>' example += f' \"{item[\"name\"]}\": {example_},\\n' return", "\"boolean\": example_ += \"true\" elif type_ == \"string\": if \"format\"", "f'\"{array_obj[\"enum\"][0]}\"' if \"Enum\" not in type_: if \"string\" in type_:", "= request_object_[\"properties\"] example_, __, __ = ExampleWriting(request_object_properties_, [], 1) if", "!= \".\": description_ += \".\" if \"enum\" in properties: type_", "f'<tr>\\n<td width=\"20%\">value</td> <td> <code>{component[\"items\"][\"type\"]}</code> <br/>/</td>\\n</tr>\\n' writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli", "\"2021-11-26T15:18:27.693Z\" else: example_ += \"0\" else: type_ = prop_type +", "\",\\n\" example_ = tab + f' \"{name}\": ' if type_", "order = 0 if \"content\" in requestBody: component = requestBody[\"content\"][\"application/json\"][\"schema\"]", "= path[-1] enum = \"\" item_list.append(path) request_object = doc for", "tab + \"}\", line, item_list return example + \"\\n\" +", "in array_obj: type_ = array_obj[\"type\"] + \" Array\" if \"enum\"", "method takes no parameters.</td>\\n</tr>\\n</table>' description_ += f'The <code>{api_call}</code> API accepts", "result[\"post\"] if \"post\" in 
result else result[\"get\"] # Create path", "in properties else \"/\" if (example != \"{\\n\" and not", "+= \"true\" elif type_ == \"string\": if \"format\" in properties:", "api_call, result in paths.items(): j = 1 content = result[\"post\"]", "= array_obj[\"type\"] + \" Array\" if \"enum\" in array_obj: type_", "+= \"\\n\" + tab + \" ]\" if order ==", "example_ = 'true' writeUp += f'\\n<tr>\\n<td width=\"20%\">{item[\"name\"]}</td> <td> <code>{type_}</code><br/>{description_}</td>\\n</tr>' example", "0 while i < len(item_list): request_object = doc for item", "__ = ExampleWriting(request_object_properties_, [], 1) if \"type\" in array_obj: type_", "\"type\" in properties else \"object\" description_ = properties[\"description\"] if \"description\"", "write_up, __, item_list = ExampleWriting(request_object_properties_, item_list, order=order+2) example_ += tab", "= tab + f' \"{name}\": ' if type_ == \"array\":", "else: example_ += '\"string\"' if description_[-1] != \".\": description_ +=", "writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\"><code>{item}</code> Model", "in paths.items(): j = 1 content = result[\"post\"] if \"post\"", "request_object_[\"type\"] description_ = request_object_[\"description\"] if \"description\" in request_object_ else \"/\"", "\"}\", line, item_list return example + \"\\n\" + tab +", "= {\"Our Platform\": \"QuantConnect-Platform-2.0.0.yaml\", \"Alpha Streams\": \"QuantConnect-Alpha-0.8.yaml\"} def RequestTable(api_call, params):", "ExampleWriting(request_object_properties_, item_list, order=order+1) example_ += write_up elif \"type\" in request_object_:", "<br/> {description_}</td>\\n</tr>\\n') example += example_ if not array: return example", "tab + \" ]\" if order == 0 or array:", "+= 1 # Create Description part if having one if", "paths = doc[\"paths\"] for api_call, result in paths.items(): j =", "= component[\"items\"][\"$ref\"].split(\"/\")[1:] array = True order += 1 else: 
writeUp", "component = requestBody[\"$ref\"].split(\"/\")[1:] item_list = [component] i = 0 while", "if (example != \"{\\n\" and not array) or (example !=", "in component: component = component[\"$ref\"].split(\"/\")[1:] elif \"items\" in component and", "+ f'$({prop_type[\"format\"]})' + \" object\" if prop_type[\"format\"] == \"date-time\": example_", "array_obj: type_ = type_ + \" Enum\" description_ += f'", "type_ = prop_type + f'$({prop_type[\"format\"]})' + \" object\" if prop_type[\"format\"]", "\"type\" in request_object_: properties = request_object_properties_ = request_object_ type_ =", "\"[\\n {\\n\" else: example = \"{\\n\" line = [] for", "Maximum: {item[\"schema\"][\"maximum\"]}' example_ = item[\"schema\"][\"maximum\"] elif \"default\" in item[\"schema\"]: description_", "in requestBody: component = requestBody[\"content\"][\"application/json\"][\"schema\"] if \"$ref\" in component: component", "\"\" item_list.append(path) request_object = doc for item in path: request_object", "request_object[\"enum\"][0] + '\"' example += f'\\n {text},' example += '\\b\\n", "\" object\" example_ += f'\"{prop_type}\"' elif \"$ref\" in add_prop: ref", "\" Array\" example_ += tab + f' \"{properties[\"items\"][\"type\"]}\"' elif \"$ref\"", "+ description_ + \"</p>\\n\") html_file.write(writeUp) j += 1 # Create", "== \"integer\" or type_ == \"number\": example_ += \"0\" elif", "item, index, page was not found.</p>\\n') continue elif code ==", "example_ += '[\\n' if \"type\" in properties[\"items\"]: type_ = properties[\"items\"][\"type\"]", "colspan=\"1\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>\\n' writeUp += f'</tr>\\n<td><code>{api_call}</code> method takes no parameters.</td>\\n</tr>\\n</table>' description_", "index, page was not found.</p>\\n') continue elif code == \"default\":", "elif \"default\" in item[\"schema\"]: description_ += f' Default: {item[\"schema\"][\"default\"]}' example_", "\"description\" in request_object: writeUp += f'<th 
colspan=\"2\"><code>{item_list[i][-1]}</code> Model - {request_object[\"description\"]}</th>\\n'", "description_[-1] != \".\": description_ += \".\" if \"enum\" in properties:", "import yaml documentations = {\"Our Platform\": \"QuantConnect-Platform-2.0.0.yaml\", \"Alpha Streams\": \"QuantConnect-Alpha-0.8.yaml\"}", "array_obj: type_ = array_obj[\"type\"] + \" Array\" if \"enum\" in", "= pathlib.Path(\"/\".join(content[\"tags\"])) destination_folder.mkdir(parents=True, exist_ok=True) # Create Introduction part with open(destination_folder", "\".\" description_ += \" \" writeUp = ResponseTable(content[\"requestBody\"]) else: writeUp", "'<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n[\\n [' writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n'", "continue elif \"type\" in request_object and \"properties\" not in request_object:", "request_object_[\"properties\"] write_up, __, item_list = ExampleWriting(request_object_properties_, item_list, order=order+1) example_ +=", "= request_object[\"description\"] if description_[-1] != \".\": description_ += \".\" writeUp", "array = True order += 1 else: writeUp += '<table", "yaml_file = open(source) doc = yaml.load(yaml_file, Loader=yaml.Loader) paths = doc[\"paths\"]", "in request_object: for y in request_object[\"oneOf\"]: item_list.append(y[\"$ref\"].split(\"/\")[1:]) i += 1", "f'{j:02} Responses.html', \"w\") as html_file: html_file.write('<p>\\n') html_file.write(f'The <code>{api_call}</code> API provides", "in request_object: text = request_object[\"example\"] elif \"enum\" in request_object: text", "+= \".\" if \"type\" in item[\"schema\"]: type_ = item[\"schema\"][\"type\"] else:", "= ref[-1] + \" Array\" if ref not in item_list:", "\"2021-11-26T15:18:27.693Z\" else: example_ += '\"string\"' if description_[-1] != \".\": description_", "if \"type\" in array_obj: type_ = array_obj[\"type\"] + \" Array\"", "= ResponseTable(properties) 
html_file.write(writeUp) print(f\"Documentation of {section} is generated and inplace!\")", "request_object: writeUp += f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model - {request_object[\"description\"]}</th>\\n' else: writeUp", "\"content\" in requestBody: component = requestBody[\"content\"][\"application/json\"][\"schema\"] if \"$ref\" in component:", "if having one if \"description\" in content: with open(destination_folder /", "Error</h4>\\n') writeUp = ResponseTable(properties) html_file.write(writeUp) print(f\"Documentation of {section} is generated", "else: writeUp = '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"1\"><code>{api_call}</code>", "in request_object: writeUp += f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model - {request_object[\"description\"]}</th>\\n' else:", "\"array\": array_obj = item[\"schema\"][\"items\"] if \"$ref\" in array_obj: type_ =", "\"\" if \"parameters\" in content: writeUp = RequestTable(api_call, content[\"parameters\"]) elif", "no parameters.</td>\\n</tr>\\n</table>' description_ += f'The <code>{api_call}</code> API accepts requests in", "for y in prop: path = y[\"$ref\"].split(\"/\")[1:] name = path[-1]", "f'{j:02} Description.html', \"w\") as html_file: html_file.write('<p>\\n') html_file.write(f'{content[\"description\"]}\\n') html_file.write('</p>\\n') j +=", "return writeUp def ExampleWriting(request_object_properties, item_list, array=False, order=0): tab = \"", "requested item, index, page was not found.</p>\\n') continue elif code", "+= f' Options: {str(array_obj[\"enum\"])}' example_ = f'\"{array_obj[\"enum\"][0]}\"' if \"Enum\" not", "properties: type_ += \" Enum\" description_ += f' Options :", "* order if array: example = \"[\\n {\\n\" else: example", "\"type\" in request_object and \"properties\" not in request_object: request_object_properties =", "elif type_ == \"object\": if \"additionalProperties\" in properties: add_prop =", "RequestTable(api_call, 
params): writeUp = '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th", "\"string\": if \"format\" in properties: type_ += f'(${properties[\"format\"]})' example_ +=", "request_object_: request_object_properties_ = request_object_[\"properties\"] example_, __, __ = ExampleWriting(request_object_properties_, [],", "f'\\n<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{request_object[\"type\"]}</code> <br/> {description_ + enum}</td>\\n</tr>\\n' if \"example\"", "= [] for name, properties in request_object_properties.items(): type_ = properties[\"type\"]", "\"Optional. \" if \"required\" not in item or not item[\"required\"]", "not in item or not item[\"required\"] else \"\" description_ +=", "= False order -= 1 for line in html_property: writeUp", "f' \"{properties[\"items\"][\"type\"]}\"' elif \"$ref\" in properties[\"items\"]: ref = properties[\"items\"][\"$ref\"].split(\"/\")[1:] type_", "properties else \"object\" description_ = properties[\"description\"] if \"description\" in properties", "order -= 1 for line in html_property: writeUp += line", "else \"/\" if (example != \"{\\n\" and not array) or", "= 1 content = result[\"post\"] if \"post\" in result else", "Model - Unauthorized response from the API. 
Key is missing,", "for hash.</th>\\n') html_file.write('</tr>\\n</thead>\\n<tr>\\n<td width=\"20%\">www_authenticate</td> <td> <code>string</code> <br/> Header</td>\\n</tr>\\n</table>\\n') continue elif", "\"}\\n\" + \" \" * (order-1) + \"]\", line, item_list", "properties[\"example\"] type_ += f'<br/><i><sub>example: {eg}</sub></i>' if isinstance(eg, str): eg =", "= request_object_[item] if \"properties\" in request_object_: request_object_properties_ = request_object_[\"properties\"] write_up,", "{text},' example += '\\b\\n ]\\n]' writeUp += example writeUp +=", "= requestBody[\"$ref\"].split(\"/\")[1:] item_list = [component] i = 0 while i", "if \"parameters\" in content: writeUp = RequestTable(api_call, content[\"parameters\"]) elif \"requestBody\"", "Method</th>\\n</tr>\\n</thead>\\n' writeUp += f'</tr>\\n<td><code>{api_call}</code> method takes no parameters.</td>\\n</tr>\\n</table>' description_ +=", "width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n[\\n [' writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp", "example_ += write_up elif \"type\" in request_object_: properties = request_object_properties_", "\"enum\" in request_object: enum = \" Options: \" + str(request_object[\"enum\"])", "\"type\" in properties[\"items\"]: type_ = properties[\"items\"][\"type\"] + \" Array\" example_", "elif \"enum\" in request_object: text = '\"' + request_object[\"enum\"][0] +", "<code>{request_object[\"type\"]}</code> <br/> {description_ + enum}</td>\\n</tr>\\n' if \"example\" in request_object: text", "type_: example_ = tab + f' \"{name}\": \"{properties[\"enum\"][0]}\"' else: example_", "from the API. 
Key is missing, invalid, or timestamp is", "in type_: example_ = tab + f' \"{name}\": \"{properties[\"enum\"][0]}\"' else:", "tab = \" \" * order if array: example =", "not in item_list: item_list.append(ref) request_object_ = doc for item in", "else: type_ = item[\"schema\"][\"$ref\"].split(\"/\")[-1] if \"minimum\" in item[\"schema\"]: description_ +=", "html_file: description_ = \"\" if \"parameters\" in content: writeUp =", "= properties[\"example\"] type_ += f'<br/><i><sub>example: {eg}</sub></i>' if isinstance(eg, str): eg", "== \"array\": array_obj = item[\"schema\"][\"items\"] if \"$ref\" in array_obj: type_", "if \"format\" in properties: type_ += f'(${properties[\"format\"]})' example_ += \"2021-11-26T15:18:27.693Z\"", "request_object_ else \"/\" write_up, __, item_list = ExampleWriting(request_object_properties_, item_list, order=order+1)", "= properties[\"type\"] if \"type\" in properties else \"object\" description_ =", "# Create path if not exist destination_folder = pathlib.Path(\"/\".join(content[\"tags\"])) destination_folder.mkdir(parents=True,", "+= f'\\n<tr>\\n<td width=\"20%\">{item[\"name\"]}</td> <td> <code>{type_}</code><br/>{description_}</td>\\n</tr>' example += f' \"{item[\"name\"]}\": {example_},\\n'", "= \" Options: \" + str(request_object[\"enum\"]) description_ = request_object[\"description\"] if", "if description_[-1] != \".\": description_ += \".\" if \"type\" in", "item[\"schema\"]: description_ += f' Maximum: {item[\"schema\"][\"maximum\"]}' example_ = item[\"schema\"][\"maximum\"] elif", "or not item[\"required\"] else \"\" description_ += item[\"description\"] if description_[-1]", "Array\" request_object_ = doc for path in ref: request_object_ =", "+= '</tr>\\n</thead>\\n' writeUp += f'<tr>\\n<td width=\"20%\">value</td> <td> <code>{component[\"items\"][\"type\"]}</code> <br/>/</td>\\n</tr>\\n' writeUp", "= doc for item in item_list[i]: request_object = request_object[item] if", "and not array) or (example != \"[\\n {\\n\" and 
array):", "+= item[\"description\"] if description_[-1] != \".\": description_ += \".\" if", "request_object_[\"description\"] if \"description\" in request_object_ else \"/\" elif type_ ==", "Generic Error</h4>\\n') writeUp = ResponseTable(properties) html_file.write(writeUp) print(f\"Documentation of {section} is", "html_file.write('<h4>200 Success</h4>\\n') elif code == \"401\": html_file.write('<h4>401 Authentication Error</h4>\\n<table class=\"table", "documentations = {\"Our Platform\": \"QuantConnect-Platform-2.0.0.yaml\", \"Alpha Streams\": \"QuantConnect-Alpha-0.8.yaml\"} def RequestTable(api_call,", "writeUp += '</tr>\\n</thead>\\n' writeUp += f'<tr>\\n<td width=\"20%\">value</td> <td> <code>{component[\"items\"][\"type\"]}</code> <br/>/</td>\\n</tr>\\n'", "type_ = item[\"schema\"][\"type\"] else: type_ = item[\"schema\"][\"$ref\"].split(\"/\")[-1] if \"minimum\" in", "= doc[\"paths\"] for api_call, result in paths.items(): j = 1", "Method</th>\\n</tr>\\n</thead>' example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n{\\n' for item in", "+= f' Maximum: {item[\"schema\"][\"maximum\"]}' example_ = item[\"schema\"][\"maximum\"] elif \"default\" in", "in properties: type_ += f'(${properties[\"format\"]})' example_ += \"2021-11-26T15:18:27.693Z\" else: example_", "f'<th colspan=\"1\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>\\n' writeUp += f'</tr>\\n<td><code>{api_call}</code> method takes no parameters.</td>\\n</tr>\\n</table>'", "item_list = [component] i = 0 while i < len(item_list):", "def RequestTable(api_call, params): writeUp = '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp +=", "= \"\" if \"parameters\" in content: writeUp = RequestTable(api_call, content[\"parameters\"])", "description_[-1] != \".\": description_ += \".\" description_ += \" \"", "= tab + f' \"{name}\": {properties[\"enum\"][0]}' if \"example\" in properties:", "]\\n]' writeUp += example writeUp += 
'</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i += 1", "\".\": description_ += \".\" description_ += \" \" writeUp =", "\"Enum\" not in type_: if \"string\" in type_: example_ =", "in component and \"$ref\" in component[\"items\"]: component = component[\"items\"][\"$ref\"].split(\"/\")[1:] array", "class=\"cli section-example-container\"><pre>\\n' writeUp += example writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i +=", "example_ if not array: return example + \"\\n\" + tab", "order if array: example = \"[\\n {\\n\" else: example =", "part with open(destination_folder / f'{j:02} Responses.html', \"w\") as html_file: html_file.write('<p>\\n')", "class=\"table qc-table\">\\n<thead>\\n<tr>\\n') html_file.write('<th colspan=\"2\"><code>UnauthorizedError</code> Model - Unauthorized response from the", "+ \" \" * (order-1) + \"]\", line, item_list for", "API provides a response in the following format:\\n') html_file.write('</p>\\n') request_body", "content = result[\"post\"] if \"post\" in result else result[\"get\"] #", "doc for item in item_list[i]: request_object = request_object[item] if \"items\"", "in request_body.items(): if code == \"200\": html_file.write('<h4>200 Success</h4>\\n') elif code", "writeUp += '</tr>\\n</thead>' for y in prop: path = y[\"$ref\"].split(\"/\")[1:]", "paths.items(): j = 1 content = result[\"post\"] if \"post\" in", "\"</p>\\n\") html_file.write(writeUp) j += 1 # Create Response part with", "= item[\"schema\"][\"default\"] if type_ == \"array\": array_obj = item[\"schema\"][\"items\"] if", "f'<th colspan=\"2\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>' example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n{\\n' for", "\" \" * 2 + write_up elif type_ == \"object\":", "'\"' + eg + '\"' example_ = tab + f'", "Not Found Error</h4>\\n') html_file.write('<p>The requested item, index, page was not", "type_ += f'(${properties[\"format\"]})' example_ += 
\"2021-11-26T15:18:27.693Z\" else: example_ += '\"string\"'", "\" \" * (order-1) + \"]\", line, item_list for section,", "= \"\" item_list.append(path) request_object = doc for item in path:", "return writeUp + example + \"\\b}</pre>\\n</div>\\n</td>\\n</tr>\\n</table>\" def ResponseTable(requestBody): writeUp =", "\"description\" in content[\"requestBody\"]: description_ = str(content[\"requestBody\"][\"description\"]) if description_[-1] != \".\":", "\"404\": html_file.write('<h4>404 Not Found Error</h4>\\n') html_file.write('<p>The requested item, index, page", "example_ += \"0\" else: type_ = prop_type + \" object\"", "ref = properties[\"items\"][\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" Array\" if", "- {request_object[\"description\"]}</th>\\n' writeUp += '</tr>\\n</thead>' for y in prop: path", "Streams\": \"QuantConnect-Alpha-0.8.yaml\"} def RequestTable(api_call, params): writeUp = '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n'", "= RequestTable(api_call, content[\"parameters\"]) elif \"requestBody\" in content: if \"description\" in", "Found Error</h4>\\n') html_file.write('<p>The requested item, index, page was not found.</p>\\n')", "+ \" Array\" example_ += tab + f' \"{properties[\"items\"][\"type\"]}\"' elif", "Model - {request_object[\"description\"]}</th>\\n' else: writeUp += f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model</th>\\n' writeUp", "__, item_list = ExampleWriting(request_object_properties_, item_list, order=order+2) example_ += tab +", "+ f' \"{properties[\"items\"][\"type\"]}\"' elif \"$ref\" in properties[\"items\"]: ref = properties[\"items\"][\"$ref\"].split(\"/\")[1:]", "= item[\"schema\"][\"$ref\"].split(\"/\")[-1] if \"minimum\" in item[\"schema\"]: description_ += f' Minimum:", "params: example_ = \"/\" description_ = \"Optional. 
\" if \"required\"", "type_ = ref[-1] + \" Array\" if ref not in", "html_file.write(\"</p>\\n\") j += 1 # Create Description part if having", "j += 1 # Create Response part with open(destination_folder /", "\" * (order-1) + \"]\", line, item_list for section, source", "format:\\n' html_file.write(\"<p>\\n\" + description_ + \"</p>\\n\") html_file.write(writeUp) j += 1", "API accepts requests in the following format:\\n' html_file.write(\"<p>\\n\" + description_", "+= example writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i += 1 continue elif", "!= \".\": description_ += \".\" writeUp += f'\\n<tr>\\n<td width=\"20%\">{name}</td> <td>", "if \"properties\" in request_object_: request_object_properties_ = request_object_[\"properties\"] example_, __, __", "ref[-1] + \" Array\" if ref not in item_list: item_list.append(ref)", "\"format\" in prop_type: type_ = prop_type + f'$({prop_type[\"format\"]})' + \"", "/ f'{j:02} Description.html', \"w\") as html_file: html_file.write('<p>\\n') html_file.write(f'{content[\"description\"]}\\n') html_file.write('</p>\\n') j", "if \"required\" not in item or not item[\"required\"] else \"\"", "f'$({prop_type[\"format\"]})' + \" object\" if prop_type[\"format\"] == \"date-time\": example_ +=", "/ f'{j:02} Responses.html', \"w\") as html_file: html_file.write('<p>\\n') html_file.write(f'The <code>{api_call}</code> API", "type_ == \"integer\" or type_ == \"number\": example_ += \"0\"", "\" * 2 + write_up elif type_ == \"object\": if", "+ tab + \"}\", line, item_list return example + \"\\n\"", "qc-table\">\\n<thead>\\n<tr>\\n' if \"description\" in request_object: writeUp += f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model", "request_object_: request_object_properties_ = request_object_[\"properties\"] write_up, __, item_list = ExampleWriting(request_object_properties_, item_list,", "f' Options : {properties[\"enum\"]}' if \"string\" in type_: example_ =", "and \"$ref\" in component[\"items\"]: component = 
component[\"items\"][\"$ref\"].split(\"/\")[1:] array = True", "array, order) if array: array = False order -= 1", "item_list.append(y[\"$ref\"].split(\"/\")[1:]) i += 1 continue elif \"properties\" in request_object: request_object_properties", "= '\"' + request_object[\"enum\"][0] + '\"' example += f'\\n {text},'", "\"Alpha Streams\": \"QuantConnect-Alpha-0.8.yaml\"} def RequestTable(api_call, params): writeUp = '<table class=\"table", "example += f' \"{item[\"name\"]}\": {example_},\\n' return writeUp + example +", "+= '\"string\"' if description_[-1] != \".\": description_ += \".\" if", "html_file.write(writeUp) j += 1 # Create Response part with open(destination_folder", "0 or array: line.append(f'<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{type_}</code> <br/> {description_}</td>\\n</tr>\\n') example", "+= f'<br/><i><sub>example: {eg}</sub></i>' if isinstance(eg, str): eg = '\"' +", "\"$ref\" in component: component = component[\"$ref\"].split(\"/\")[1:] elif \"items\" in component", "\"{\\n\" line = [] for name, properties in request_object_properties.items(): type_", "colspan=\"2\"><code>{item_list[i][-1]}</code> Model - {request_object[\"description\"]}</th>\\n' else: writeUp += f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model</th>\\n'", "= request_object_[\"properties\"] write_up, __, item_list = ExampleWriting(request_object_properties_, item_list, order=order+1) example_", "ref[-1] + \" object\" if ref not in item_list: item_list.append(ref)", "item_list for section, source in documentations.items(): yaml_file = open(source) doc", "html_file.write('<h4>404 Not Found Error</h4>\\n') html_file.write('<p>The requested item, index, page was", "width=\"20%\">{name}</td> <td> <code>{request_object[\"type\"]}</code> <br/> {description_ + enum}</td>\\n</tr>\\n' if \"example\" in", "part if having one if \"description\" in content: with open(destination_folder", "+ \"</p>\\n\") html_file.write(writeUp) j += 1 # Create Response part", "description_ 
+= \".\" writeUp += f'\\n<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{request_object[\"type\"]}</code> <br/>", "= ExampleWriting(request_object_properties_, item_list, order=order+1) example_ += write_up elif \"$ref\" in", "item[\"schema\"][\"minimum\"] elif \"maximum\" in item[\"schema\"]: description_ += f' Maximum: {item[\"schema\"][\"maximum\"]}'", "\"Array\" in type_: example_ += \"\\n\" + tab + \"", "Description part if having one if \"description\" in content: with", "in prop_type: type_ = prop_type + f'$({prop_type[\"format\"]})' + \" object\"", "<br/>/</td>\\n</tr>\\n' writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n' writeUp += f'[\\n", "if \"example\" in properties: eg = properties[\"example\"] type_ += f'<br/><i><sub>example:", "writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i += 1 return writeUp def ExampleWriting(request_object_properties,", "type_ = array_obj[\"$ref\"].split(\"/\")[-1] + \" Array\" ref = array_obj[\"$ref\"].split(\"/\")[1:] type_", "elif code == \"default\": html_file.write('<h4>Default Generic Error</h4>\\n') writeUp = ResponseTable(properties)", "prop_type[\"format\"] == \"date-time\": example_ += \"2021-11-26T15:18:27.693Z\" else: example_ += \"0\"", "ExampleWriting(request_object_properties_, item_list, order=order+1) example_ += write_up elif \"$ref\" in properties:", "\"content\" in request_object: item_list.append(request_object[\"content\"][\"application/json\"][\"schema\"][\"$ref\"].split(\"/\")[1:]) i += 1 continue elif \"type\"", "with open(destination_folder / f'{j:02} Introduction.html', \"w\") as html_file: html_file.write(\"<p>\\n\") html_file.write(f\"{content['summary']}\\n\")", "or type_ == \"number\": example_ += \"0\" elif type_ ==", "\" Array\" request_object_ = doc for path in ref: request_object_", "in request_object_properties.items(): type_ = properties[\"type\"] if \"type\" in properties else", "[], 1) if \"type\" in array_obj: 
type_ = array_obj[\"type\"] +", "example_, __, __ = ExampleWriting(request_object_properties_, [], 1) if \"type\" in", "\"properties\" in request_object_: request_object_properties_ = request_object_[\"properties\"] example_, __, __ =", "component and \"$ref\" in component[\"items\"]: component = component[\"items\"][\"$ref\"].split(\"/\")[1:] array =", "doc for item in path: request_object = request_object[item] if \"enum\"", "prop_type + f'$({prop_type[\"format\"]})' + \" object\" if prop_type[\"format\"] == \"date-time\":", "writeUp += f'<tr>\\n<td width=\"20%\">value</td> <td> <code>{component[\"items\"][\"type\"]}</code> <br/>/</td>\\n</tr>\\n' writeUp += '<tr>\\n<td", "else: example_ = tab + f' \"{name}\": {properties[\"enum\"][0]}' if \"example\"", "properties: add_prop = properties[\"additionalProperties\"] if \"type\" in add_prop: prop_type =", "in ref: request_object_ = request_object_[item] if \"properties\" in request_object_: request_object_properties_", "description_ = request_object_[\"description\"] if \"description\" in request_object_ else \"/\" write_up,", "in params: example_ = \"/\" description_ = \"Optional. \" if", "colspan=\"2\"><code>UnauthorizedError</code> Model - Unauthorized response from the API. 
Key is", "f'(${properties[\"format\"]})' example_ += \"2021-11-26T15:18:27.693Z\" else: example_ += '\"string\"' if description_[-1]", "description_ = \"\" if \"parameters\" in content: writeUp = RequestTable(api_call,", "ref[-1] + \" Array\" request_object_ = doc for path in", "f' Maximum: {item[\"schema\"][\"maximum\"]}' example_ = item[\"schema\"][\"maximum\"] elif \"default\" in item[\"schema\"]:", "+= 1 else: writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp +=", "\"example\" in properties: eg = properties[\"example\"] type_ += f'<br/><i><sub>example: {eg}</sub></i>'", "request_object = doc for item in path: request_object = request_object[item]", "write_up elif \"type\" in request_object_: properties = request_object_properties_ = request_object_", "\".\" if \"type\" in item[\"schema\"]: type_ = item[\"schema\"][\"type\"] else: type_", "type_ == \"number\": example_ += \"0\" elif type_ == \"boolean\":", "request_object_[item] if \"properties\" in request_object_: request_object_properties_ = request_object_[\"properties\"] write_up, __,", "array: array = False order -= 1 for line in", "description_ += \" \" writeUp = ResponseTable(content[\"requestBody\"]) else: writeUp =", "(example != \"[\\n {\\n\" and array): example += \",\\n\" example_", "description_ += f'The <code>{api_call}</code> API accepts requests in the following", "\"QuantConnect-Platform-2.0.0.yaml\", \"Alpha Streams\": \"QuantConnect-Alpha-0.8.yaml\"} def RequestTable(api_call, params): writeUp = '<table", "+ '\"' example_ = tab + f' \"{name}\": {eg}' if", "description_ += \".\" if \"enum\" in properties: type_ += \"", "+= tab + \" \" * 2 + write_up elif", "example, html_property, item_list = ExampleWriting(request_object_properties, item_list, array, order) if array:", "\" ]\" if order == 0 or array: line.append(f'<tr>\\n<td width=\"20%\">{name}</td>", "Enum\" description_ += f' Options: {str(array_obj[\"enum\"])}' example_ = f'\"{array_obj[\"enum\"][0]}\"' if", 
"Create Description part if having one if \"description\" in content:", "request_object_: properties = request_object_properties_ = request_object_ type_ = request_object_[\"type\"] description_", "False order = 0 if \"content\" in requestBody: component =", "example = \"[\\n {\\n\" else: example = \"{\\n\" line =", "\"{properties[\"items\"][\"type\"]}\"' elif \"$ref\" in properties[\"items\"]: ref = properties[\"items\"][\"$ref\"].split(\"/\")[1:] type_ =", "description_[-1] != \".\": description_ += \".\" writeUp += f'\\n<tr>\\n<td width=\"20%\">{name}</td>", "RequestTable(api_call, content[\"parameters\"]) elif \"requestBody\" in content: if \"description\" in content[\"requestBody\"]:", "html_file: html_file.write('<p>\\n') html_file.write(f'The <code>{api_call}</code> API provides a response in the", "= request_object[item] if \"items\" in request_object and \"oneOf\" in request_object[\"items\"]:", "else \"object\" description_ = properties[\"description\"] if \"description\" in properties else", "tab + f' \"{name}\": ' if type_ == \"array\": example_", "request_object_properties_ = request_object_[\"properties\"] write_up, __, item_list = ExampleWriting(request_object_properties_, item_list, order=order+1)", "class=\"cli section-example-container\"><pre>\\n[\\n [' writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp +=", "type_ == \"array\": example_ += '[\\n' if \"type\" in properties[\"items\"]:", "\"\\n\" + tab + \"}\\n\" + \" \" * (order-1)", "parameters.</td>\\n</tr>\\n</table>' description_ += f'The <code>{api_call}</code> API accepts requests in the", "# Create Description part if having one if \"description\" in", "if \"post\" in result else result[\"get\"] # Create path if", "example_ = item[\"schema\"][\"minimum\"] elif \"maximum\" in item[\"schema\"]: description_ += f'", "description_ += item[\"description\"] if description_[-1] != \".\": description_ += \".\"", "array=False, order=0): tab = \" \" * order if array:", "doc for 
item in ref: request_object_ = request_object_[item] if \"properties\"", "request_object_ = request_object_[path] if \"properties\" in request_object_: request_object_properties_ = request_object_[\"properties\"]", "class=\"table qc-table\">\\n<thead>\\n<tr>\\n' if \"description\" in request_object: writeUp += f'<th colspan=\"2\"><code>{item_list[i][-1]}</code>", "request_object[\"properties\"] elif \"content\" in request_object: item_list.append(request_object[\"content\"][\"application/json\"][\"schema\"][\"$ref\"].split(\"/\")[1:]) i += 1 continue", "Success</h4>\\n') elif code == \"401\": html_file.write('<h4>401 Authentication Error</h4>\\n<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n')", "\".\": description_ += \".\" if \"type\" in item[\"schema\"]: type_ =", "section-example-container\"><pre>\\n' writeUp += example writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i += 1", "request_object: item_list.append(request_object[\"content\"][\"application/json\"][\"schema\"][\"$ref\"].split(\"/\")[1:]) i += 1 continue elif \"type\" in request_object", "API. 
Key is missing, invalid, or timestamp is too old", "item or not item[\"required\"] else \"\" description_ += item[\"description\"] if", "+ tab + \"}\\n\" + \" \" * (order-1) +", "{item[\"schema\"][\"minimum\"]}' example_ = item[\"schema\"][\"minimum\"] elif \"maximum\" in item[\"schema\"]: description_ +=", "Array\" ref = array_obj[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" Array\"", "request_object and \"oneOf\" in request_object[\"items\"]: prop = request_object[\"items\"][\"oneOf\"] example =", "{\\n\" and array): example += \",\\n\" example_ = tab +", "item[\"schema\"][\"items\"] if \"$ref\" in array_obj: type_ = array_obj[\"$ref\"].split(\"/\")[-1] + \"", "request_object_ = request_object_[item] if \"properties\" in request_object_: request_object_properties_ = request_object_[\"properties\"]", "tab + f' \"{properties[\"items\"][\"type\"]}\"' elif \"$ref\" in properties[\"items\"]: ref =", "type_ = type_ + \" Enum\" description_ += f' Options:", "object\" if ref not in item_list: item_list.append(ref) request_object_ = doc", "Response part with open(destination_folder / f'{j:02} Responses.html', \"w\") as html_file:", "html_file.write('</p>\\n') j += 1 # Create Request part with open(destination_folder", "example_ += write_up elif \"$ref\" in properties: ref = properties[\"$ref\"].split(\"/\")[1:]", "in item[\"schema\"]: description_ += f' Minimum: {item[\"schema\"][\"minimum\"]}' example_ = item[\"schema\"][\"minimum\"]", "writeUp += f'<th colspan=\"2\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>' example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli", "html_property: writeUp += line writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n'", "item[\"schema\"]: description_ += f' Minimum: {item[\"schema\"][\"minimum\"]}' example_ = item[\"schema\"][\"minimum\"] elif", "html_file.write('<h4>401 Authentication Error</h4>\\n<table class=\"table 
qc-table\">\\n<thead>\\n<tr>\\n') html_file.write('<th colspan=\"2\"><code>UnauthorizedError</code> Model - Unauthorized", "example_ = tab + f' \"{name}\": {properties[\"enum\"][0]}' if \"example\" in", "writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n' writeUp += example writeUp", "request_object[item] if \"enum\" in request_object: enum = \" Options: \"", "\" Options: \" + str(request_object[\"enum\"]) description_ = request_object[\"description\"] if description_[-1]", "writeUp += f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model - {request_object[\"description\"]}</th>\\n' else: writeUp +=", "else: writeUp += f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model</th>\\n' writeUp += '</tr>\\n</thead>\\n' example,", "+= '</tr>\\n</thead>' for y in prop: path = y[\"$ref\"].split(\"/\")[1:] name", "\"properties\" in request_object_: request_object_properties_ = request_object_[\"properties\"] description_ = request_object_[\"description\"] if", "writeUp += example writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i += 1 return", "+= f' Default: {item[\"schema\"][\"default\"]}' example_ = item[\"schema\"][\"default\"] if type_ ==", "type_: example_ = '0' elif \"boolean\" in type_: example_ =", "html_file: html_file.write(\"<p>\\n\") html_file.write(f\"{content['summary']}\\n\") html_file.write(\"</p>\\n\") j += 1 # Create Description", "elif type_ == \"integer\" or type_ == \"number\": example_ +=", "= True order += 1 else: writeUp += '<table class=\"table", "writeUp = ResponseTable(properties) html_file.write(writeUp) print(f\"Documentation of {section} is generated and", "\"$ref\" in add_prop: ref = add_prop[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] +", "= request_object[\"properties\"] elif \"content\" in request_object: item_list.append(request_object[\"content\"][\"application/json\"][\"schema\"][\"$ref\"].split(\"/\")[1:]) i += 1", "ref = add_prop[\"$ref\"].split(\"/\")[1:] 
type_ = ref[-1] + \" object\" if", "in array_obj: type_ = array_obj[\"$ref\"].split(\"/\")[-1] + \" Array\" ref =", "type_ = prop_type + \" object\" example_ += f'\"{prop_type}\"' elif", "elif \"type\" in request_object_: properties = request_object_properties_ = request_object_ type_", "j = 1 content = result[\"post\"] if \"post\" in result", "if \"description\" in content[\"requestBody\"]: description_ = str(content[\"requestBody\"][\"description\"]) if description_[-1] !=", "object\" if prop_type[\"format\"] == \"date-time\": example_ += \"2021-11-26T15:18:27.693Z\" else: example_", "in request_object: item_list.append(request_object[\"content\"][\"application/json\"][\"schema\"][\"$ref\"].split(\"/\")[1:]) i += 1 continue elif \"type\" in", "writeUp += f'[\\n \"{component[\"items\"][\"example\"]}\"\\n]' writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' return writeUp else:", "properties: type_ += f'(${properties[\"format\"]})' example_ += \"2021-11-26T15:18:27.693Z\" else: example_ +=", "as html_file: html_file.write('<p>\\n') html_file.write(f'The <code>{api_call}</code> API provides a response in", "Options : {properties[\"enum\"]}' if \"string\" in type_: example_ = tab", "elif \"requestBody\" in content: if \"description\" in content[\"requestBody\"]: description_ =", "following format:\\n') html_file.write('</p>\\n') request_body = content[\"responses\"] for code, properties in", "in content: if \"description\" in content[\"requestBody\"]: description_ = str(content[\"requestBody\"][\"description\"]) if", "else: example = \"{\\n\" line = [] for name, properties", "if array: example = \"[\\n {\\n\" else: example = \"{\\n\"", "in the following format:\\n' html_file.write(\"<p>\\n\" + description_ + \"</p>\\n\") html_file.write(writeUp)", "with open(destination_folder / f'{j:02} Responses.html', \"w\") as html_file: html_file.write('<p>\\n') html_file.write(f'The", "+= '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli 
section-example-container\"><pre>\\n' writeUp += example writeUp +=", "doc[\"paths\"] for api_call, result in paths.items(): j = 1 content", "else result[\"get\"] # Create path if not exist destination_folder =", "properties: eg = properties[\"example\"] type_ += f'<br/><i><sub>example: {eg}</sub></i>' if isinstance(eg,", "request_object_ = doc for path in ref: request_object_ = request_object_[path]", "open(destination_folder / f'{j:02} Request.html', \"w\") as html_file: description_ = \"\"", "= request_object_[\"description\"] if \"description\" in request_object_ else \"/\" write_up, __,", "response from the API. Key is missing, invalid, or timestamp", "= False order = 0 if \"content\" in requestBody: component", "\" + str(request_object[\"enum\"]) description_ = request_object[\"description\"] if description_[-1] != \".\":", "= array_obj[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" Array\" request_object_ =", "is too old for hash.</th>\\n') html_file.write('</tr>\\n</thead>\\n<tr>\\n<td width=\"20%\">www_authenticate</td> <td> <code>string</code> <br/>", "<code>{type_}</code><br/>{description_}</td>\\n</tr>' example += f' \"{item[\"name\"]}\": {example_},\\n' return writeUp + example", "request_object[\"example\"] elif \"enum\" in request_object: text = '\"' + request_object[\"enum\"][0]", "(example != \"{\\n\" and not array) or (example != \"[\\n", "= '0' elif \"boolean\" in type_: example_ = 'true' writeUp", "elif \"oneOf\" in request_object: for y in request_object[\"oneOf\"]: item_list.append(y[\"$ref\"].split(\"/\")[1:]) i", "+= 1 return writeUp def ExampleWriting(request_object_properties, item_list, array=False, order=0): tab", "item in path: request_object = request_object[item] if \"enum\" in request_object:", "tab + \" \" * 2 + write_up elif type_", "\"401\": html_file.write('<h4>401 Authentication Error</h4>\\n<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n') html_file.write('<th colspan=\"2\"><code>UnauthorizedError</code> Model -", 
"item_list.append(ref) request_object_ = doc for item in ref: request_object_ =", "# Create Introduction part with open(destination_folder / f'{j:02} Introduction.html', \"w\")", "= '\"string\"' elif \"number\" in type_ or \"integer\" in type_:", "\"{name}\": {properties[\"enum\"][0]}' if \"example\" in properties: eg = properties[\"example\"] type_", "\"description\" in request_object_ else \"/\" elif type_ == \"integer\" or", "\".\": description_ += \".\" writeUp += f'\\n<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{request_object[\"type\"]}</code>", "type_: example_ += \"\\n\" + tab + \" ]\" if", "properties[\"items\"]: type_ = properties[\"items\"][\"type\"] + \" Array\" example_ += tab", "content: if \"description\" in content[\"requestBody\"]: description_ = str(content[\"requestBody\"][\"description\"]) if description_[-1]", "str): eg = '\"' + eg + '\"' example_ =", "request_object_properties_ = request_object_[\"properties\"] write_up, __, item_list = ExampleWriting(request_object_properties_, item_list, order=order+2)", "writeUp = '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>'", "\"items\" in component and \"$ref\" in component[\"items\"]: component = component[\"items\"][\"$ref\"].split(\"/\")[1:]", "colspan=\"2\">{requestBody[\"description\"]}</th>\\n' writeUp += '</tr>\\n</thead>\\n' writeUp += f'<tr>\\n<td width=\"20%\">value</td> <td> <code>{component[\"items\"][\"type\"]}</code>", "i += 1 continue elif \"type\" in request_object and \"properties\"", "'</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i += 1 return writeUp def ExampleWriting(request_object_properties, item_list, array=False,", "not found.</p>\\n') continue elif code == \"default\": html_file.write('<h4>Default Generic Error</h4>\\n')", "in ref: request_object_ = request_object_[path] if \"properties\" in request_object_: request_object_properties_", "elif \"items\" in component and \"$ref\" in 
component[\"items\"]: component =", "+= '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i += 1 continue elif \"oneOf\" in request_object:", "item_list, array, order) if array: array = False order -=", "qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>' example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div", "following format:\\n' html_file.write(\"<p>\\n\" + description_ + \"</p>\\n\") html_file.write(writeUp) j +=", "example + \"\\n\" + tab + \"}\", line, item_list return", "request_object = request_object[item] if \"enum\" in request_object: enum = \"", "+= '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' return writeUp else: component = requestBody[\"$ref\"].split(\"/\")[1:] item_list =", "in properties: eg = properties[\"example\"] type_ += f'<br/><i><sub>example: {eg}</sub></i>' if", "documentations.items(): yaml_file = open(source) doc = yaml.load(yaml_file, Loader=yaml.Loader) paths =", "\" * order if array: example = \"[\\n {\\n\" else:", "== \"array\": example_ += '[\\n' if \"type\" in properties[\"items\"]: type_", "enum = \"\" item_list.append(path) request_object = doc for item in", "if array: array = False order -= 1 for line", "html_file.write('</p>\\n') request_body = content[\"responses\"] for code, properties in request_body.items(): if", "ResponseTable(requestBody): writeUp = \"\" array = False order = 0", "line = [] for name, properties in request_object_properties.items(): type_ =", "== \"object\": if \"additionalProperties\" in properties: add_prop = properties[\"additionalProperties\"] if", "Minimum: {item[\"schema\"][\"minimum\"]}' example_ = item[\"schema\"][\"minimum\"] elif \"maximum\" in item[\"schema\"]: description_", "result in paths.items(): j = 1 content = result[\"post\"] if", "type_ = ref[-1] + \" Array\" request_object_ = doc for", "= '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\"><code>{api_call}</code> 
Method</th>\\n</tr>\\n</thead>' example", "+= example writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i += 1 return writeUp", "\"oneOf\" in request_object[\"items\"]: prop = request_object[\"items\"][\"oneOf\"] example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div", "- Unauthorized response from the API. Key is missing, invalid,", "+ example + \"\\b}</pre>\\n</div>\\n</td>\\n</tr>\\n</table>\" def ResponseTable(requestBody): writeUp = \"\" array", "+= write_up elif \"$ref\" in properties: ref = properties[\"$ref\"].split(\"/\")[1:] type_", "+ enum}</td>\\n</tr>\\n' if \"example\" in request_object: text = request_object[\"example\"] elif", "+ \" object\" if ref not in item_list: item_list.append(ref) request_object_", "if not exist destination_folder = pathlib.Path(\"/\".join(content[\"tags\"])) destination_folder.mkdir(parents=True, exist_ok=True) # Create", "\"\\n\" + tab + \" ]\" if order == 0", "order=order+1) example_ += write_up elif \"$ref\" in properties: ref =", "description_ += \".\" description_ += \" \" writeUp = ResponseTable(content[\"requestBody\"])", "+ \" object\" if prop_type[\"format\"] == \"date-time\": example_ += \"2021-11-26T15:18:27.693Z\"", "if description_[-1] != \".\": description_ += \".\" writeUp += f'\\n<tr>\\n<td", "+= f'<th colspan=\"2\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>' example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n{\\n'", "+= '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n' writeUp += f'[\\n \"{component[\"items\"][\"example\"]}\"\\n]' writeUp", "found.</p>\\n') continue elif code == \"default\": html_file.write('<h4>Default Generic Error</h4>\\n') writeUp", "ref = array_obj[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" Array\" request_object_", "as html_file: html_file.write(\"<p>\\n\") html_file.write(f\"{content['summary']}\\n\") html_file.write(\"</p>\\n\") j += 1 # 
Create", "yaml.load(yaml_file, Loader=yaml.Loader) paths = doc[\"paths\"] for api_call, result in paths.items():", "class=\"cli section-example-container\"><pre>\\n{\\n' for item in params: example_ = \"/\" description_", "else \"\" description_ += item[\"description\"] if description_[-1] != \".\": description_", "item in params: example_ = \"/\" description_ = \"Optional. \"", "if \"Array\" in type_: example_ += \"\\n\" + tab +", "Loader=yaml.Loader) paths = doc[\"paths\"] for api_call, result in paths.items(): j", "in properties: ref = properties[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \"", "ref not in item_list: item_list.append(ref) request_object_ = doc for item", "example_ = tab + f' \"{name}\": ' if type_ ==", "- {request_object[\"description\"]}</th>\\n' else: writeUp += f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model</th>\\n' writeUp +=", "html_file.write('<p>\\n') html_file.write(f'{content[\"description\"]}\\n') html_file.write('</p>\\n') j += 1 # Create Request part", "\"/\" if (example != \"{\\n\" and not array) or (example", "request_object_[item] if \"properties\" in request_object_: request_object_properties_ = request_object_[\"properties\"] description_ =", "\"properties\" in request_object_: request_object_properties_ = request_object_[\"properties\"] write_up, __, item_list =", "str(content[\"requestBody\"][\"description\"]) if description_[-1] != \".\": description_ += \".\" description_ +=", "is missing, invalid, or timestamp is too old for hash.</th>\\n')", "+= f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model - {request_object[\"description\"]}</th>\\n' else: writeUp += f'<th", "<br/> Header</td>\\n</tr>\\n</table>\\n') continue elif code == \"404\": html_file.write('<h4>404 Not Found", "path = y[\"$ref\"].split(\"/\")[1:] name = path[-1] enum = \"\" item_list.append(path)", "writeUp = RequestTable(api_call, content[\"parameters\"]) elif \"requestBody\" in content: if \"description\"", "Default: 
{item[\"schema\"][\"default\"]}' example_ = item[\"schema\"][\"default\"] if type_ == \"array\": array_obj", "class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>' example = '<tr>\\n<td", "i += 1 return writeUp def ExampleWriting(request_object_properties, item_list, array=False, order=0):", "+= f'<th colspan=\"2\"><code>{item}</code> Model - {request_object[\"description\"]}</th>\\n' writeUp += '</tr>\\n</thead>' for", "'0' elif \"boolean\" in type_: example_ = 'true' writeUp +=", "prop: path = y[\"$ref\"].split(\"/\")[1:] name = path[-1] enum = \"\"", "Model</th>\\n' writeUp += '</tr>\\n</thead>\\n' example, html_property, item_list = ExampleWriting(request_object_properties, item_list,", "1 # Create Description part if having one if \"description\"", "content[\"responses\"] for code, properties in request_body.items(): if code == \"200\":", "takes no parameters.</td>\\n</tr>\\n</table>' description_ += f'The <code>{api_call}</code> API accepts requests", "writeUp += f'</tr>\\n<td><code>{api_call}</code> method takes no parameters.</td>\\n</tr>\\n</table>' description_ += f'The", "elif \"boolean\" in type_: example_ = 'true' writeUp += f'\\n<tr>\\n<td", "= item[\"schema\"][\"type\"] else: type_ = item[\"schema\"][\"$ref\"].split(\"/\")[-1] if \"minimum\" in item[\"schema\"]:", "type_ = item[\"schema\"][\"$ref\"].split(\"/\")[-1] if \"minimum\" in item[\"schema\"]: description_ += f'", "code, properties in request_body.items(): if code == \"200\": html_file.write('<h4>200 Success</h4>\\n')", "format:\\n') html_file.write('</p>\\n') request_body = content[\"responses\"] for code, properties in request_body.items():", "class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\">{requestBody[\"description\"]}</th>\\n' writeUp += '</tr>\\n</thead>\\n' writeUp", "order=0): tab = \" \" * order if array: example", "Header</td>\\n</tr>\\n</table>\\n') continue elif code == 
\"404\": html_file.write('<h4>404 Not Found Error</h4>\\n')", "path: request_object = request_object[item] if \"enum\" in request_object: enum =", "tab + f' \"{name}\": {properties[\"enum\"][0]}' if \"example\" in properties: eg", "<code>{component[\"items\"][\"type\"]}</code> <br/>/</td>\\n</tr>\\n' writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n' writeUp +=", "description_ = properties[\"description\"] if \"description\" in properties else \"/\" if", "'<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\"><code>{item}</code> Model - {request_object[\"description\"]}</th>\\n'", "= requestBody[\"content\"][\"application/json\"][\"schema\"] if \"$ref\" in component: component = component[\"$ref\"].split(\"/\")[1:] elif", "in path: request_object = request_object[item] if \"enum\" in request_object: enum", "description_ = request_object_[\"description\"] if \"description\" in request_object_ else \"/\" elif", "in add_prop: ref = add_prop[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \"", "or timestamp is too old for hash.</th>\\n') html_file.write('</tr>\\n</thead>\\n<tr>\\n<td width=\"20%\">www_authenticate</td> <td>", "+= tab + f' \"{properties[\"items\"][\"type\"]}\"' elif \"$ref\" in properties[\"items\"]: ref", "example_ = f'\"{array_obj[\"enum\"][0]}\"' if \"Enum\" not in type_: if \"string\"", "= doc for item in path: request_object = request_object[item] if", "item[\"description\"] if description_[-1] != \".\": description_ += \".\" if \"type\"", "!= \"[\\n {\\n\" and array): example += \",\\n\" example_ =", "== \"404\": html_file.write('<h4>404 Not Found Error</h4>\\n') html_file.write('<p>The requested item, index,", "+= '[\\n' if \"type\" in properties[\"items\"]: type_ = properties[\"items\"][\"type\"] +", "request_object and \"properties\" not in request_object: request_object_properties = {item: request_object}", "= request_object_[\"properties\"] description_ 
= request_object_[\"description\"] if \"description\" in request_object_ else", "in request_object_ else \"/\" elif type_ == \"integer\" or type_", "writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' if \"description\" in request_object: writeUp", "in request_object: text = '\"' + request_object[\"enum\"][0] + '\"' example", "array) or (example != \"[\\n {\\n\" and array): example +=", "Error</h4>\\n') html_file.write('<p>The requested item, index, page was not found.</p>\\n') continue", "+= f'\\n {text},' example += '\\b\\n ]\\n]' writeUp += example", "+ \" ]\" if order == 0 or array: line.append(f'<tr>\\n<td", "enum = \" Options: \" + str(request_object[\"enum\"]) description_ = request_object[\"description\"]", "ResponseTable(content[\"requestBody\"]) else: writeUp = '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th", "item[\"schema\"]: description_ += f' Default: {item[\"schema\"][\"default\"]}' example_ = item[\"schema\"][\"default\"] if", "if \"enum\" in request_object: enum = \" Options: \" +", "request_object_[\"properties\"] description_ = request_object_[\"description\"] if \"description\" in request_object_ else \"/\"", "'\"' example_ = tab + f' \"{name}\": {eg}' if \"Array\"", "example_ += '\"string\"' if description_[-1] != \".\": description_ += \".\"", "result[\"get\"] # Create path if not exist destination_folder = pathlib.Path(\"/\".join(content[\"tags\"]))", "not array: return example + \"\\n\" + tab + \"}\",", "text = request_object[\"example\"] elif \"enum\" in request_object: text = '\"'", "\" \" writeUp = ResponseTable(content[\"requestBody\"]) else: writeUp = '<table class=\"table", "\"{component[\"items\"][\"example\"]}\"\\n]' writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' return writeUp else: component = requestBody[\"$ref\"].split(\"/\")[1:]", "= [component] i = 0 while i < len(item_list): request_object", "str(request_object[\"enum\"]) description_ = request_object[\"description\"] if 
description_[-1] != \".\": description_ +=", "writeUp += f'<th colspan=\"2\">{requestBody[\"description\"]}</th>\\n' writeUp += '</tr>\\n</thead>\\n' writeUp += f'<tr>\\n<td", "== \"boolean\": example_ += \"true\" elif type_ == \"string\": if", "or \"integer\" in type_: example_ = '0' elif \"boolean\" in", "part with open(destination_folder / f'{j:02} Request.html', \"w\") as html_file: description_", "the API. Key is missing, invalid, or timestamp is too", "< len(item_list): request_object = doc for item in item_list[i]: request_object", "in the following format:\\n') html_file.write('</p>\\n') request_body = content[\"responses\"] for code,", "continue elif code == \"404\": html_file.write('<h4>404 Not Found Error</h4>\\n') html_file.write('<p>The", "Array\" if \"enum\" in array_obj: type_ = type_ + \"", "example_ += \"true\" elif type_ == \"string\": if \"format\" in", "open(destination_folder / f'{j:02} Introduction.html', \"w\") as html_file: html_file.write(\"<p>\\n\") html_file.write(f\"{content['summary']}\\n\") html_file.write(\"</p>\\n\")", "Introduction.html', \"w\") as html_file: html_file.write(\"<p>\\n\") html_file.write(f\"{content['summary']}\\n\") html_file.write(\"</p>\\n\") j += 1", "= request_object[item] if \"enum\" in request_object: enum = \" Options:", "example_ = tab + f' \"{name}\": {eg}' if \"Array\" in", "1 # Create Request part with open(destination_folder / f'{j:02} Request.html',", "<td> <code>{request_object[\"type\"]}</code> <br/> {description_ + enum}</td>\\n</tr>\\n' if \"example\" in request_object:", "object\" example_ += f'\"{prop_type}\"' elif \"$ref\" in add_prop: ref =", "\" Array\" if ref not in item_list: item_list.append(ref) request_object_ =", "and \"oneOf\" in request_object[\"items\"]: prop = request_object[\"items\"][\"oneOf\"] example = '<tr>\\n<td", "{item: request_object} writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' if \"description\" in", "if \"properties\" in request_object_: 
request_object_properties_ = request_object_[\"properties\"] description_ = request_object_[\"description\"]", "\"/\" description_ = \"Optional. \" if \"required\" not in item", "= item[\"schema\"][\"items\"] if \"$ref\" in array_obj: type_ = array_obj[\"$ref\"].split(\"/\")[-1] +", "if order == 0 or array: line.append(f'<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{type_}</code>", "class=\"cli section-example-container\"><pre>\\n' writeUp += f'[\\n \"{component[\"items\"][\"example\"]}\"\\n]' writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' return", "if \"description\" in request_object_ else \"/\" write_up, __, item_list =", "if \"minimum\" in item[\"schema\"]: description_ += f' Minimum: {item[\"schema\"][\"minimum\"]}' example_", "\" object\" if prop_type[\"format\"] == \"date-time\": example_ += \"2021-11-26T15:18:27.693Z\" else:", "continue elif \"oneOf\" in request_object: for y in request_object[\"oneOf\"]: item_list.append(y[\"$ref\"].split(\"/\")[1:])", "request_object_[path] if \"properties\" in request_object_: request_object_properties_ = request_object_[\"properties\"] example_, __,", "= \"/\" description_ = \"Optional. 
\" if \"required\" not in", "\"string\" in type_: example_ = '\"string\"' elif \"number\" in type_", "\"description\" in properties else \"/\" if (example != \"{\\n\" and", "<td> <code>{type_}</code> <br/> {description_}</td>\\n</tr>\\n') example += example_ if not array:", "in content: with open(destination_folder / f'{j:02} Description.html', \"w\") as html_file:", "write_up elif type_ == \"object\": if \"additionalProperties\" in properties: add_prop", "= \"[\\n {\\n\" else: example = \"{\\n\" line = []", "writeUp + example + \"\\b}</pre>\\n</div>\\n</td>\\n</tr>\\n</table>\" def ResponseTable(requestBody): writeUp = \"\"", "path in ref: request_object_ = request_object_[path] if \"properties\" in request_object_:", "\"$ref\" in properties: ref = properties[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] +", "if \"type\" in properties[\"items\"]: type_ = properties[\"items\"][\"type\"] + \" Array\"", "in type_: example_ = 'true' writeUp += f'\\n<tr>\\n<td width=\"20%\">{item[\"name\"]}</td> <td>", "\"enum\" in properties: type_ += \" Enum\" description_ += f'", "not in type_: if \"string\" in type_: example_ = '\"string\"'", "= \"Optional. 
\" if \"required\" not in item or not", "example += \",\\n\" example_ = tab + f' \"{name}\": '", "in type_: if \"string\" in type_: example_ = '\"string\"' elif", "type_: if \"string\" in type_: example_ = '\"string\"' elif \"number\"", "len(item_list): request_object = doc for item in item_list[i]: request_object =", "+ write_up elif type_ == \"object\": if \"additionalProperties\" in properties:", "example + \"\\n\" + tab + \"}\\n\" + \" \"", "name = path[-1] enum = \"\" item_list.append(path) request_object = doc", "= request_object_ type_ = request_object_[\"type\"] description_ = request_object_[\"description\"] if \"description\"", "= tab + f' \"{name}\": {eg}' if \"Array\" in type_:", "name, properties in request_object_properties.items(): type_ = properties[\"type\"] if \"type\" in", "= 0 while i < len(item_list): request_object = doc for", "html_file.write('<th colspan=\"2\"><code>UnauthorizedError</code> Model - Unauthorized response from the API. Key", "{description_ + enum}</td>\\n</tr>\\n' if \"example\" in request_object: text = request_object[\"example\"]", "+= f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model</th>\\n' writeUp += '</tr>\\n</thead>\\n' example, html_property, item_list", "request_object_: request_object_properties_ = request_object_[\"properties\"] description_ = request_object_[\"description\"] if \"description\" in", "in item[\"schema\"]: description_ += f' Default: {item[\"schema\"][\"default\"]}' example_ = item[\"schema\"][\"default\"]", "ExampleWriting(request_object_properties, item_list, array=False, order=0): tab = \" \" * order", "def ExampleWriting(request_object_properties, item_list, array=False, order=0): tab = \" \" *", "request_object: request_object_properties = {item: request_object} writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n'", "if prop_type[\"format\"] == \"date-time\": example_ += \"2021-11-26T15:18:27.693Z\" else: example_ +=", "1 content = result[\"post\"] if \"post\" in result else 
result[\"get\"]", "i = 0 while i < len(item_list): request_object = doc", "class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\"><code>{item}</code> Model - {request_object[\"description\"]}</th>\\n' writeUp", "one if \"description\" in content: with open(destination_folder / f'{j:02} Description.html',", "= prop_type + f'$({prop_type[\"format\"]})' + \" object\" if prop_type[\"format\"] ==", "open(destination_folder / f'{j:02} Responses.html', \"w\") as html_file: html_file.write('<p>\\n') html_file.write(f'The <code>{api_call}</code>", "{properties[\"enum\"][0]}' if \"example\" in properties: eg = properties[\"example\"] type_ +=", "code == \"401\": html_file.write('<h4>401 Authentication Error</h4>\\n<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n') html_file.write('<th colspan=\"2\"><code>UnauthorizedError</code>", "html_file.write('<p>The requested item, index, page was not found.</p>\\n') continue elif", "else: component = requestBody[\"$ref\"].split(\"/\")[1:] item_list = [component] i = 0", "\"number\": example_ += \"0\" elif type_ == \"boolean\": example_ +=", "'true' writeUp += f'\\n<tr>\\n<td width=\"20%\">{item[\"name\"]}</td> <td> <code>{type_}</code><br/>{description_}</td>\\n</tr>' example += f'", "prop_type: type_ = prop_type + f'$({prop_type[\"format\"]})' + \" object\" if", "order) if array: array = False order -= 1 for", "writeUp += f'\\n<tr>\\n<td width=\"20%\">{item[\"name\"]}</td> <td> <code>{type_}</code><br/>{description_}</td>\\n</tr>' example += f' \"{item[\"name\"]}\":", "+= f'[\\n \"{component[\"items\"][\"example\"]}\"\\n]' writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' return writeUp else: component", "[' writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\"><code>{item}</code>", "' if type_ == \"array\": example_ += '[\\n' if \"type\"", "component[\"items\"][\"$ref\"].split(\"/\")[1:] array = True order += 1 else: writeUp +=", "exist destination_folder = 
pathlib.Path(\"/\".join(content[\"tags\"])) destination_folder.mkdir(parents=True, exist_ok=True) # Create Introduction part", "array): example += \",\\n\" example_ = tab + f' \"{name}\":", "item_list return example + \"\\n\" + tab + \"}\\n\" +", "\"enum\" in array_obj: type_ = type_ + \" Enum\" description_", "'<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n' writeUp += f'[\\n \"{component[\"items\"][\"example\"]}\"\\n]' writeUp +=", "in item or not item[\"required\"] else \"\" description_ += item[\"description\"]", "old for hash.</th>\\n') html_file.write('</tr>\\n</thead>\\n<tr>\\n<td width=\"20%\">www_authenticate</td> <td> <code>string</code> <br/> Header</td>\\n</tr>\\n</table>\\n') continue", "not in request_object: request_object_properties = {item: request_object} writeUp += '<table", "{item[\"schema\"][\"maximum\"]}' example_ = item[\"schema\"][\"maximum\"] elif \"default\" in item[\"schema\"]: description_ +=", "item[\"schema\"][\"type\"] else: type_ = item[\"schema\"][\"$ref\"].split(\"/\")[-1] if \"minimum\" in item[\"schema\"]: description_", "in array_obj: type_ = type_ + \" Enum\" description_ +=", "\"required\" not in item or not item[\"required\"] else \"\" description_", "f'\\n {text},' example += '\\b\\n ]\\n]' writeUp += example writeUp", "\"200\": html_file.write('<h4>200 Success</h4>\\n') elif code == \"401\": html_file.write('<h4>401 Authentication Error</h4>\\n<table", "\".\" if \"enum\" in properties: type_ += \" Enum\" description_", "= '\"' + eg + '\"' example_ = tab +", "j += 1 # Create Description part if having one", "for item in item_list[i]: request_object = request_object[item] if \"items\" in", "\".\" writeUp += f'\\n<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{request_object[\"type\"]}</code> <br/> {description_ +", "= '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"1\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>\\n' writeUp", 
"in request_object[\"items\"]: prop = request_object[\"items\"][\"oneOf\"] example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli", "prop_type = add_prop[\"type\"] if \"format\" in prop_type: type_ = prop_type", "continue elif \"properties\" in request_object: request_object_properties = request_object[\"properties\"] elif \"content\"", "else: writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\">{requestBody[\"description\"]}</th>\\n'", "y in prop: path = y[\"$ref\"].split(\"/\")[1:] name = path[-1] enum", "<td> <code>{component[\"items\"][\"type\"]}</code> <br/>/</td>\\n</tr>\\n' writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n' writeUp", "array = False order = 0 if \"content\" in requestBody:", "= item[\"schema\"][\"minimum\"] elif \"maximum\" in item[\"schema\"]: description_ += f' Maximum:", "result else result[\"get\"] # Create path if not exist destination_folder", "\"post\" in result else result[\"get\"] # Create path if not", "type_ == \"array\": array_obj = item[\"schema\"][\"items\"] if \"$ref\" in array_obj:", "if \"enum\" in array_obj: type_ = type_ + \" Enum\"", "the following format:\\n') html_file.write('</p>\\n') request_body = content[\"responses\"] for code, properties", "for item in ref: request_object_ = request_object_[item] if \"properties\" in", "= ExampleWriting(request_object_properties_, item_list, order=order+2) example_ += tab + \" \"", "array_obj = item[\"schema\"][\"items\"] if \"$ref\" in array_obj: type_ = array_obj[\"$ref\"].split(\"/\")[-1]", "\"default\" in item[\"schema\"]: description_ += f' Default: {item[\"schema\"][\"default\"]}' example_ =", "request_object_ type_ = request_object_[\"type\"] description_ = request_object_[\"description\"] if \"description\" in", "accepts requests in the following format:\\n' html_file.write(\"<p>\\n\" + description_ +", "example_ = \"/\" description_ = \"Optional. 
\" if \"required\" not", "example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n[\\n [' writeUp += '<table", "== 0 or array: line.append(f'<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{type_}</code> <br/> {description_}</td>\\n</tr>\\n')", "page was not found.</p>\\n') continue elif code == \"default\": html_file.write('<h4>Default", "properties[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" object\" if ref not", "+= \",\\n\" example_ = tab + f' \"{name}\": ' if", "example_ += tab + \" \" * 2 + write_up", "\"enum\" in request_object: text = '\"' + request_object[\"enum\"][0] + '\"'", "= tab + f' \"{name}\": \"{properties[\"enum\"][0]}\"' else: example_ = tab", "add_prop = properties[\"additionalProperties\"] if \"type\" in add_prop: prop_type = add_prop[\"type\"]", "item[\"schema\"]: type_ = item[\"schema\"][\"type\"] else: type_ = item[\"schema\"][\"$ref\"].split(\"/\")[-1] if \"minimum\"", "type_ or \"integer\" in type_: example_ = '0' elif \"boolean\"", "request_object[item] if \"items\" in request_object and \"oneOf\" in request_object[\"items\"]: prop", "f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model</th>\\n' writeUp += '</tr>\\n</thead>\\n' example, html_property, item_list =", "in content[\"requestBody\"]: description_ = str(content[\"requestBody\"][\"description\"]) if description_[-1] != \".\": description_", "+= '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' if \"description\" in request_object: writeUp +=", "for item in params: example_ = \"/\" description_ = \"Optional.", "+= line writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n' writeUp +=", "\"w\") as html_file: html_file.write(\"<p>\\n\") html_file.write(f\"{content['summary']}\\n\") html_file.write(\"</p>\\n\") j += 1 #", "\"format\" in properties: type_ += f'(${properties[\"format\"]})' example_ += \"2021-11-26T15:18:27.693Z\" else:", "\"w\") as html_file: 
description_ = \"\" if \"parameters\" in content:", "\"additionalProperties\" in properties: add_prop = properties[\"additionalProperties\"] if \"type\" in add_prop:", "elif type_ == \"string\": if \"format\" in properties: type_ +=", "\"example\" in request_object: text = request_object[\"example\"] elif \"enum\" in request_object:", "f' Options: {str(array_obj[\"enum\"])}' example_ = f'\"{array_obj[\"enum\"][0]}\"' if \"Enum\" not in", "= item[\"schema\"][\"maximum\"] elif \"default\" in item[\"schema\"]: description_ += f' Default:", "Introduction part with open(destination_folder / f'{j:02} Introduction.html', \"w\") as html_file:", "requestBody: component = requestBody[\"content\"][\"application/json\"][\"schema\"] if \"$ref\" in component: component =", "if \"string\" in type_: example_ = tab + f' \"{name}\":", "\"true\" elif type_ == \"string\": if \"format\" in properties: type_", "if \"$ref\" in array_obj: type_ = array_obj[\"$ref\"].split(\"/\")[-1] + \" Array\"", "eg = properties[\"example\"] type_ += f'<br/><i><sub>example: {eg}</sub></i>' if isinstance(eg, str):", "array_obj[\"$ref\"].split(\"/\")[-1] + \" Array\" ref = array_obj[\"$ref\"].split(\"/\")[1:] type_ = ref[-1]", "line, item_list return example + \"\\n\" + tab + \"}\\n\"", "elif code == \"401\": html_file.write('<h4>401 Authentication Error</h4>\\n<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n') html_file.write('<th", "+ eg + '\"' example_ = tab + f' \"{name}\":", "\"description\" in request_object_ else \"/\" write_up, __, item_list = ExampleWriting(request_object_properties_,", "in type_: example_ = '\"string\"' elif \"number\" in type_ or", "!= \".\": description_ += \".\" description_ += \" \" writeUp", "example + \"\\b}</pre>\\n</div>\\n</td>\\n</tr>\\n</table>\" def ResponseTable(requestBody): writeUp = \"\" array =", "'</tr>\\n</thead>\\n' example, html_property, item_list = ExampleWriting(request_object_properties, item_list, array, order) if", "component = 
requestBody[\"content\"][\"application/json\"][\"schema\"] if \"$ref\" in component: component = component[\"$ref\"].split(\"/\")[1:]", "elif \"maximum\" in item[\"schema\"]: description_ += f' Maximum: {item[\"schema\"][\"maximum\"]}' example_", "properties[\"type\"] if \"type\" in properties else \"object\" description_ = properties[\"description\"]", "width=\"20%\">{item[\"name\"]}</td> <td> <code>{type_}</code><br/>{description_}</td>\\n</tr>' example += f' \"{item[\"name\"]}\": {example_},\\n' return writeUp", "= yaml.load(yaml_file, Loader=yaml.Loader) paths = doc[\"paths\"] for api_call, result in", "or (example != \"[\\n {\\n\" and array): example += \",\\n\"", "writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\">{requestBody[\"description\"]}</th>\\n' writeUp", "width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n' writeUp += example writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i", "= array_obj[\"$ref\"].split(\"/\")[-1] + \" Array\" ref = array_obj[\"$ref\"].split(\"/\")[1:] type_ =", "was not found.</p>\\n') continue elif code == \"default\": html_file.write('<h4>Default Generic", "\" Enum\" description_ += f' Options: {str(array_obj[\"enum\"])}' example_ = f'\"{array_obj[\"enum\"][0]}\"'", "\"{item[\"name\"]}\": {example_},\\n' return writeUp + example + \"\\b}</pre>\\n</div>\\n</td>\\n</tr>\\n</table>\" def ResponseTable(requestBody):", "\"\\n\" + tab + \"}\", line, item_list return example +", "+ \" Array\" if ref not in item_list: item_list.append(ref) request_object_", "in request_object_: request_object_properties_ = request_object_[\"properties\"] write_up, __, item_list = ExampleWriting(request_object_properties_,", "+= 1 # Create Response part with open(destination_folder / f'{j:02}", "not array) or (example != \"[\\n {\\n\" and array): example", "Create Response part with open(destination_folder / f'{j:02} Responses.html', \"w\") as", 
"width=\"20%\">www_authenticate</td> <td> <code>string</code> <br/> Header</td>\\n</tr>\\n</table>\\n') continue elif code == \"404\":", "1 continue elif \"type\" in request_object and \"properties\" not in", "\"properties\" not in request_object: request_object_properties = {item: request_object} writeUp +=", "+ f' \"{name}\": {eg}' if \"Array\" in type_: example_ +=", "{\"Our Platform\": \"QuantConnect-Platform-2.0.0.yaml\", \"Alpha Streams\": \"QuantConnect-Alpha-0.8.yaml\"} def RequestTable(api_call, params): writeUp", "\"0\" else: type_ = prop_type + \" object\" example_ +=", "writeUp = \"\" array = False order = 0 if", "+= f' Options : {properties[\"enum\"]}' if \"string\" in type_: example_", "0 if \"content\" in requestBody: component = requestBody[\"content\"][\"application/json\"][\"schema\"] if \"$ref\"", "qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\"><code>{item}</code> Model - {request_object[\"description\"]}</th>\\n' writeUp +=", "i += 1 continue elif \"oneOf\" in request_object: for y", "\"w\") as html_file: html_file.write('<p>\\n') html_file.write(f'The <code>{api_call}</code> API provides a response", "properties[\"description\"] if \"description\" in properties else \"/\" if (example !=", "line.append(f'<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{type_}</code> <br/> {description_}</td>\\n</tr>\\n') example += example_ if", "example = \"{\\n\" line = [] for name, properties in", "if ref not in item_list: item_list.append(ref) request_object_ = doc for", "type_ += \" Enum\" description_ += f' Options : {properties[\"enum\"]}'", "* 2 + write_up elif type_ == \"object\": if \"additionalProperties\"", "Responses.html', \"w\") as html_file: html_file.write('<p>\\n') html_file.write(f'The <code>{api_call}</code> API provides a", "= type_ + \" Enum\" description_ += f' Options: {str(array_obj[\"enum\"])}'", "html_file.write('</tr>\\n</thead>\\n<tr>\\n<td width=\"20%\">www_authenticate</td> <td> <code>string</code> <br/> 
Header</td>\\n</tr>\\n</table>\\n') continue elif code ==", "item_list: item_list.append(ref) request_object_ = doc for item in ref: request_object_", "example_ = tab + f' \"{name}\": \"{properties[\"enum\"][0]}\"' else: example_ =", "properties[\"additionalProperties\"] if \"type\" in add_prop: prop_type = add_prop[\"type\"] if \"format\"", "+= '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\">{requestBody[\"description\"]}</th>\\n' writeUp +=", "in item_list[i]: request_object = request_object[item] if \"items\" in request_object and", "f'<th colspan=\"2\"><code>{item}</code> Model - {request_object[\"description\"]}</th>\\n' writeUp += '</tr>\\n</thead>' for y", "+ '\"' example += f'\\n {text},' example += '\\b\\n ]\\n]'", "isinstance(eg, str): eg = '\"' + eg + '\"' example_", "Key is missing, invalid, or timestamp is too old for", "<code>{api_call}</code> API accepts requests in the following format:\\n' html_file.write(\"<p>\\n\" +", "'<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' if \"description\" in request_object: writeUp += f'<th", "line, item_list for section, source in documentations.items(): yaml_file = open(source)", "\"\" description_ += item[\"description\"] if description_[-1] != \".\": description_ +=", "item[\"schema\"][\"maximum\"] elif \"default\" in item[\"schema\"]: description_ += f' Default: {item[\"schema\"][\"default\"]}'", "prop = request_object[\"items\"][\"oneOf\"] example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n[\\n ['", "item_list, order=order+1) example_ += write_up elif \"type\" in request_object_: properties", "provides a response in the following format:\\n') html_file.write('</p>\\n') request_body =", "exist_ok=True) # Create Introduction part with open(destination_folder / f'{j:02} Introduction.html',", "'<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\"><code>{api_call}</code> 
Method</th>\\n</tr>\\n</thead>' example =", "in type_ or \"integer\" in type_: example_ = '0' elif", "invalid, or timestamp is too old for hash.</th>\\n') html_file.write('</tr>\\n</thead>\\n<tr>\\n<td width=\"20%\">www_authenticate</td>", "type_ == \"boolean\": example_ += \"true\" elif type_ == \"string\":", "+= \"0\" elif type_ == \"boolean\": example_ += \"true\" elif", "in request_object_ else \"/\" write_up, __, item_list = ExampleWriting(request_object_properties_, item_list,", "if \"Enum\" not in type_: if \"string\" in type_: example_", "/ f'{j:02} Introduction.html', \"w\") as html_file: html_file.write(\"<p>\\n\") html_file.write(f\"{content['summary']}\\n\") html_file.write(\"</p>\\n\") j", "1 continue elif \"oneOf\" in request_object: for y in request_object[\"oneOf\"]:", "'\"string\"' if description_[-1] != \".\": description_ += \".\" if \"enum\"", "Array\" example_ += tab + f' \"{properties[\"items\"][\"type\"]}\"' elif \"$ref\" in", "html_file.write(\"<p>\\n\") html_file.write(f\"{content['summary']}\\n\") html_file.write(\"</p>\\n\") j += 1 # Create Description part", "in request_object and \"oneOf\" in request_object[\"items\"]: prop = request_object[\"items\"][\"oneOf\"] example", "in item_list: item_list.append(ref) request_object_ = doc for item in ref:", "for line in html_property: writeUp += line writeUp += '<tr>\\n<td", "requestBody[\"$ref\"].split(\"/\")[1:] item_list = [component] i = 0 while i <", "in add_prop: prop_type = add_prop[\"type\"] if \"format\" in prop_type: type_", "Error</h4>\\n<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n') html_file.write('<th colspan=\"2\"><code>UnauthorizedError</code> Model - Unauthorized response from", "as html_file: html_file.write('<p>\\n') html_file.write(f'{content[\"description\"]}\\n') html_file.write('</p>\\n') j += 1 # Create", "= component[\"$ref\"].split(\"/\")[1:] elif \"items\" in component and \"$ref\" in component[\"items\"]:", "\"oneOf\" in request_object: for y in 
request_object[\"oneOf\"]: item_list.append(y[\"$ref\"].split(\"/\")[1:]) i +=", "else \"/\" elif type_ == \"integer\" or type_ == \"number\":", "= ResponseTable(content[\"requestBody\"]) else: writeUp = '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp +=", "item_list.append(path) request_object = doc for item in path: request_object =", "i < len(item_list): request_object = doc for item in item_list[i]:", "in type_: example_ = '0' elif \"boolean\" in type_: example_", "+= \".\" writeUp += f'\\n<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{request_object[\"type\"]}</code> <br/> {description_", "\"{name}\": ' if type_ == \"array\": example_ += '[\\n' if", "with open(destination_folder / f'{j:02} Description.html', \"w\") as html_file: html_file.write('<p>\\n') html_file.write(f'{content[\"description\"]}\\n')", "continue elif code == \"default\": html_file.write('<h4>Default Generic Error</h4>\\n') writeUp =", "if \"type\" in item[\"schema\"]: type_ = item[\"schema\"][\"type\"] else: type_ =", "= properties[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" object\" if ref", "+= f'The <code>{api_call}</code> API accepts requests in the following format:\\n'", "properties[\"items\"][\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" Array\" if ref not", "\"items\" in request_object and \"oneOf\" in request_object[\"items\"]: prop = request_object[\"items\"][\"oneOf\"]", "{description_}</td>\\n</tr>\\n') example += example_ if not array: return example +", "+= write_up elif \"type\" in request_object_: properties = request_object_properties_ =", "else: example_ += \"0\" else: type_ = prop_type + \"", "elif type_ == \"boolean\": example_ += \"true\" elif type_ ==", "writeUp += f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model</th>\\n' writeUp += '</tr>\\n</thead>\\n' example, html_property,", "<code>{api_call}</code> API provides a response in the following format:\\n') html_file.write('</p>\\n')", "elif \"content\" in request_object: 
item_list.append(request_object[\"content\"][\"application/json\"][\"schema\"][\"$ref\"].split(\"/\")[1:]) i += 1 continue elif", "html_file.write(\"<p>\\n\" + description_ + \"</p>\\n\") html_file.write(writeUp) j += 1 #", "in component[\"items\"]: component = component[\"items\"][\"$ref\"].split(\"/\")[1:] array = True order +=", "array = False order -= 1 for line in html_property:", "in html_property: writeUp += line writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli", "params): writeUp = '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\"><code>{api_call}</code>", "= 0 if \"content\" in requestBody: component = requestBody[\"content\"][\"application/json\"][\"schema\"] if", "colspan=\"2\"><code>{item_list[i][-1]}</code> Model</th>\\n' writeUp += '</tr>\\n</thead>\\n' example, html_property, item_list = ExampleWriting(request_object_properties,", "+ request_object[\"enum\"][0] + '\"' example += f'\\n {text},' example +=", "\"type\" in item[\"schema\"]: type_ = item[\"schema\"][\"type\"] else: type_ = item[\"schema\"][\"$ref\"].split(\"/\")[-1]", "ref: request_object_ = request_object_[item] if \"properties\" in request_object_: request_object_properties_ =", "example_ += f'\"{prop_type}\"' elif \"$ref\" in add_prop: ref = add_prop[\"$ref\"].split(\"/\")[1:]", "array_obj[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" Array\" request_object_ = doc", "<code>{type_}</code> <br/> {description_}</td>\\n</tr>\\n') example += example_ if not array: return", "array_obj: type_ = array_obj[\"$ref\"].split(\"/\")[-1] + \" Array\" ref = array_obj[\"$ref\"].split(\"/\")[1:]", "+ f' \"{name}\": ' if type_ == \"array\": example_ +=", "properties in request_object_properties.items(): type_ = properties[\"type\"] if \"type\" in properties", "Model - {request_object[\"description\"]}</th>\\n' writeUp += '</tr>\\n</thead>' for y in prop:", "+= 1 continue elif \"oneOf\" in request_object: for y in", "\"\" array = False 
order = 0 if \"content\" in", "description_ = \"Optional. \" if \"required\" not in item or", "f'[\\n \"{component[\"items\"][\"example\"]}\"\\n]' writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' return writeUp else: component =", "\"string\" in type_: example_ = tab + f' \"{name}\": \"{properties[\"enum\"][0]}\"'", "open(source) doc = yaml.load(yaml_file, Loader=yaml.Loader) paths = doc[\"paths\"] for api_call,", "+ str(request_object[\"enum\"]) description_ = request_object[\"description\"] if description_[-1] != \".\": description_", "= request_object[\"items\"][\"oneOf\"] example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n[\\n [' writeUp", "html_property, item_list = ExampleWriting(request_object_properties, item_list, array, order) if array: array", "request_object: text = '\"' + request_object[\"enum\"][0] + '\"' example +=", "\" \" * order if array: example = \"[\\n {\\n\"", "{properties[\"enum\"]}' if \"string\" in type_: example_ = tab + f'", "= properties[\"items\"][\"type\"] + \" Array\" example_ += tab + f'", "+ f' \"{name}\": {properties[\"enum\"][0]}' if \"example\" in properties: eg =", "array_obj[\"type\"] + \" Array\" if \"enum\" in array_obj: type_ =", "\" Array\" ref = array_obj[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \"", "html_file.write('<p>\\n') html_file.write(f'The <code>{api_call}</code> API provides a response in the following", "f'{j:02} Request.html', \"w\") as html_file: description_ = \"\" if \"parameters\"", "= \"\" array = False order = 0 if \"content\"", "in properties: add_prop = properties[\"additionalProperties\"] if \"type\" in add_prop: prop_type", "in content: writeUp = RequestTable(api_call, content[\"parameters\"]) elif \"requestBody\" in content:", "writeUp += example writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i += 1 continue", "\" Array\" if \"enum\" in array_obj: type_ = type_ +", "writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' 
i += 1 continue elif \"oneOf\" in", "in request_object and \"properties\" not in request_object: request_object_properties = {item:", "+= \".\" description_ += \" \" writeUp = ResponseTable(content[\"requestBody\"]) else:", "\"{name}\": {eg}' if \"Array\" in type_: example_ += \"\\n\" +", "elif \"type\" in request_object and \"properties\" not in request_object: request_object_properties", "= properties[\"description\"] if \"description\" in properties else \"/\" if (example", "'<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n{\\n' for item in params: example_ =", "= ExampleWriting(request_object_properties, item_list, array, order) if array: array = False", "description_ = str(content[\"requestBody\"][\"description\"]) if description_[-1] != \".\": description_ += \".\"", "== \"string\": if \"format\" in properties: type_ += f'(${properties[\"format\"]})' example_", "writeUp = '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"1\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>\\n'", "request_object} writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' if \"description\" in request_object:", "path[-1] enum = \"\" item_list.append(path) request_object = doc for item", "\".\": description_ += \".\" if \"enum\" in properties: type_ +=", "properties = request_object_properties_ = request_object_ type_ = request_object_[\"type\"] description_ =", "1 else: writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th", "section-example-container\"><pre>\\n{\\n' for item in params: example_ = \"/\" description_ =", "+ \"\\b}</pre>\\n</div>\\n</td>\\n</tr>\\n</table>\" def ResponseTable(requestBody): writeUp = \"\" array = False", "type_ = ref[-1] + \" object\" if ref not in", "1 # Create Response part with open(destination_folder / f'{j:02} Responses.html',", "if isinstance(eg, str): eg = '\"' + eg + '\"'", "destination_folder.mkdir(parents=True, 
exist_ok=True) # Create Introduction part with open(destination_folder / f'{j:02}", "example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n{\\n' for item in params:", "f'</tr>\\n<td><code>{api_call}</code> method takes no parameters.</td>\\n</tr>\\n</table>' description_ += f'The <code>{api_call}</code> API", "request_object_properties.items(): type_ = properties[\"type\"] if \"type\" in properties else \"object\"", "+= \" \" writeUp = ResponseTable(content[\"requestBody\"]) else: writeUp = '<table", "request_object_properties_ = request_object_[\"properties\"] example_, __, __ = ExampleWriting(request_object_properties_, [], 1)", "request_object[\"items\"]: prop = request_object[\"items\"][\"oneOf\"] example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n[\\n", "item[\"schema\"][\"default\"] if type_ == \"array\": array_obj = item[\"schema\"][\"items\"] if \"$ref\"", "request_object_properties = {item: request_object} writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' if", "tab + f' \"{name}\": \"{properties[\"enum\"][0]}\"' else: example_ = tab +", "\"[\\n {\\n\" and array): example += \",\\n\" example_ = tab", "== \"200\": html_file.write('<h4>200 Success</h4>\\n') elif code == \"401\": html_file.write('<h4>401 Authentication", "colspan=\"2\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>' example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n{\\n' for item", "'\"string\"' elif \"number\" in type_ or \"integer\" in type_: example_", "request_object_properties = request_object[\"properties\"] elif \"content\" in request_object: item_list.append(request_object[\"content\"][\"application/json\"][\"schema\"][\"$ref\"].split(\"/\")[1:]) i +=", "component[\"items\"]: component = component[\"items\"][\"$ref\"].split(\"/\")[1:] array = True order += 1", "]\" if order == 0 or 
array: line.append(f'<tr>\\n<td width=\"20%\">{name}</td> <td>", "writeUp += f'<th colspan=\"2\"><code>{item}</code> Model - {request_object[\"description\"]}</th>\\n' writeUp += '</tr>\\n</thead>'", "writeUp def ExampleWriting(request_object_properties, item_list, array=False, order=0): tab = \" \"", "= doc for item in ref: request_object_ = request_object_[item] if", "(order-1) + \"]\", line, item_list for section, source in documentations.items():", "ExampleWriting(request_object_properties_, item_list, order=order+2) example_ += tab + \" \" *", "if code == \"200\": html_file.write('<h4>200 Success</h4>\\n') elif code == \"401\":", "type_ += f'<br/><i><sub>example: {eg}</sub></i>' if isinstance(eg, str): eg = '\"'", "or array: line.append(f'<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{type_}</code> <br/> {description_}</td>\\n</tr>\\n') example +=", "path if not exist destination_folder = pathlib.Path(\"/\".join(content[\"tags\"])) destination_folder.mkdir(parents=True, exist_ok=True) #", "y in request_object[\"oneOf\"]: item_list.append(y[\"$ref\"].split(\"/\")[1:]) i += 1 continue elif \"properties\"", "item_list = ExampleWriting(request_object_properties_, item_list, order=order+1) example_ += write_up elif \"type\"", "write_up, __, item_list = ExampleWriting(request_object_properties_, item_list, order=order+1) example_ += write_up", "\"number\" in type_ or \"integer\" in type_: example_ = '0'", "Array\" if ref not in item_list: item_list.append(ref) request_object_ = doc", "import pathlib import yaml documentations = {\"Our Platform\": \"QuantConnect-Platform-2.0.0.yaml\", \"Alpha", "example_ += \"\\n\" + tab + \" ]\" if order", "f'The <code>{api_call}</code> API accepts requests in the following format:\\n' html_file.write(\"<p>\\n\"", "'</tr>\\n</thead>\\n' writeUp += f'<tr>\\n<td width=\"20%\">value</td> <td> <code>{component[\"items\"][\"type\"]}</code> <br/>/</td>\\n</tr>\\n' writeUp +=", "for item in path: request_object = request_object[item] if 
\"enum\" in", "item_list, order=order+1) example_ += write_up elif \"$ref\" in properties: ref", "Description.html', \"w\") as html_file: html_file.write('<p>\\n') html_file.write(f'{content[\"description\"]}\\n') html_file.write('</p>\\n') j += 1", "type_ = array_obj[\"type\"] + \" Array\" if \"enum\" in array_obj:", "request_object_[\"properties\"] example_, __, __ = ExampleWriting(request_object_properties_, [], 1) if \"type\"", "example_ += \"2021-11-26T15:18:27.693Z\" else: example_ += '\"string\"' if description_[-1] !=", "request_object: for y in request_object[\"oneOf\"]: item_list.append(y[\"$ref\"].split(\"/\")[1:]) i += 1 continue", "description_[-1] != \".\": description_ += \".\" if \"type\" in item[\"schema\"]:", "\"maximum\" in item[\"schema\"]: description_ += f' Maximum: {item[\"schema\"][\"maximum\"]}' example_ =", "ref = properties[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" object\" if", "<td> <code>string</code> <br/> Header</td>\\n</tr>\\n</table>\\n') continue elif code == \"404\": html_file.write('<h4>404", "!= \".\": description_ += \".\" if \"type\" in item[\"schema\"]: type_", "type_ = properties[\"type\"] if \"type\" in properties else \"object\" description_", "Options: {str(array_obj[\"enum\"])}' example_ = f'\"{array_obj[\"enum\"][0]}\"' if \"Enum\" not in type_:", "a response in the following format:\\n') html_file.write('</p>\\n') request_body = content[\"responses\"]", "if \"content\" in requestBody: component = requestBody[\"content\"][\"application/json\"][\"schema\"] if \"$ref\" in", "+ \" Enum\" description_ += f' Options: {str(array_obj[\"enum\"])}' example_ =", "request_object[\"items\"][\"oneOf\"] example = '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n[\\n [' writeUp +=", "* (order-1) + \"]\", line, item_list for section, source in", "if \"string\" in type_: example_ = '\"string\"' elif \"number\" in", "[component] i = 0 while i < len(item_list): request_object =", 
"code == \"200\": html_file.write('<h4>200 Success</h4>\\n') elif code == \"401\": html_file.write('<h4>401", "not item[\"required\"] else \"\" description_ += item[\"description\"] if description_[-1] !=", "add_prop: prop_type = add_prop[\"type\"] if \"format\" in prop_type: type_ =", "elif \"number\" in type_ or \"integer\" in type_: example_ =", "+ \" Array\" request_object_ = doc for path in ref:", "+= \"2021-11-26T15:18:27.693Z\" else: example_ += \"0\" else: type_ = prop_type", "f' \"{name}\": {properties[\"enum\"][0]}' if \"example\" in properties: eg = properties[\"example\"]", "order += 1 else: writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp", "for name, properties in request_object_properties.items(): type_ = properties[\"type\"] if \"type\"", "qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\">{requestBody[\"description\"]}</th>\\n' writeUp += '</tr>\\n</thead>\\n' writeUp +=", "doc for path in ref: request_object_ = request_object_[path] if \"properties\"", "+= f'</tr>\\n<td><code>{api_call}</code> method takes no parameters.</td>\\n</tr>\\n</table>' description_ += f'The <code>{api_call}</code>", "example_ = '0' elif \"boolean\" in type_: example_ = 'true'", "in item[\"schema\"]: type_ = item[\"schema\"][\"type\"] else: type_ = item[\"schema\"][\"$ref\"].split(\"/\")[-1] if", "+= \"2021-11-26T15:18:27.693Z\" else: example_ += '\"string\"' if description_[-1] != \".\":", "{eg}' if \"Array\" in type_: example_ += \"\\n\" + tab", "+ \"}\\n\" + \" \" * (order-1) + \"]\", line,", "component = component[\"$ref\"].split(\"/\")[1:] elif \"items\" in component and \"$ref\" in", "component: component = component[\"$ref\"].split(\"/\")[1:] elif \"items\" in component and \"$ref\"", "item_list = ExampleWriting(request_object_properties, item_list, array, order) if array: array =", "'<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\">{requestBody[\"description\"]}</th>\\n' writeUp += 
'</tr>\\n</thead>\\n'", "with open(destination_folder / f'{j:02} Request.html', \"w\") as html_file: description_ =", "enum}</td>\\n</tr>\\n' if \"example\" in request_object: text = request_object[\"example\"] elif \"enum\"", "1) if \"type\" in array_obj: type_ = array_obj[\"type\"] + \"", "== \"401\": html_file.write('<h4>401 Authentication Error</h4>\\n<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n') html_file.write('<th colspan=\"2\"><code>UnauthorizedError</code> Model", "item_list, array=False, order=0): tab = \" \" * order if", "for section, source in documentations.items(): yaml_file = open(source) doc =", "+ \" Array\" if \"enum\" in array_obj: type_ = type_", "item in ref: request_object_ = request_object_[item] if \"properties\" in request_object_:", "f' \"{name}\": \"{properties[\"enum\"][0]}\"' else: example_ = tab + f' \"{name}\":", "+= f'<th colspan=\"1\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>\\n' writeUp += f'</tr>\\n<td><code>{api_call}</code> method takes no", "writeUp += '</tr>\\n</thead>\\n' example, html_property, item_list = ExampleWriting(request_object_properties, item_list, array,", "y[\"$ref\"].split(\"/\")[1:] name = path[-1] enum = \"\" item_list.append(path) request_object =", "= f'\"{array_obj[\"enum\"][0]}\"' if \"Enum\" not in type_: if \"string\" in", "+ \"}\", line, item_list return example + \"\\n\" + tab", "in properties: type_ += \" Enum\" description_ += f' Options", "item_list = ExampleWriting(request_object_properties_, item_list, order=order+2) example_ += tab + \"", "= 'true' writeUp += f'\\n<tr>\\n<td width=\"20%\">{item[\"name\"]}</td> <td> <code>{type_}</code><br/>{description_}</td>\\n</tr>' example +=", "{request_object[\"description\"]}</th>\\n' writeUp += '</tr>\\n</thead>' for y in prop: path =", "html_file: html_file.write('<p>\\n') html_file.write(f'{content[\"description\"]}\\n') html_file.write('</p>\\n') j += 1 # Create Request", "Unauthorized response from the API. 
Key is missing, invalid, or", "+= example_ if not array: return example + \"\\n\" +", "+ \" object\" example_ += f'\"{prop_type}\"' elif \"$ref\" in add_prop:", "\" Enum\" description_ += f' Options : {properties[\"enum\"]}' if \"string\"", "= request_object_[\"type\"] description_ = request_object_[\"description\"] if \"description\" in request_object_ else", "+= \"0\" else: type_ = prop_type + \" object\" example_", "== \"default\": html_file.write('<h4>Default Generic Error</h4>\\n') writeUp = ResponseTable(properties) html_file.write(writeUp) print(f\"Documentation", "= doc for path in ref: request_object_ = request_object_[path] if", "+= '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i += 1 return writeUp def ExampleWriting(request_object_properties, item_list,", "qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"1\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>\\n' writeUp += f'</tr>\\n<td><code>{api_call}</code> method", "<code>string</code> <br/> Header</td>\\n</tr>\\n</table>\\n') continue elif code == \"404\": html_file.write('<h4>404 Not", "width=\"20%\">{name}</td> <td> <code>{type_}</code> <br/> {description_}</td>\\n</tr>\\n') example += example_ if not", "in prop: path = y[\"$ref\"].split(\"/\")[1:] name = path[-1] enum =", "Enum\" description_ += f' Options : {properties[\"enum\"]}' if \"string\" in", "elif code == \"404\": html_file.write('<h4>404 Not Found Error</h4>\\n') html_file.write('<p>The requested", "+= 1 continue elif \"properties\" in request_object: request_object_properties = request_object[\"properties\"]", "2 + write_up elif type_ == \"object\": if \"additionalProperties\" in", "\"]\", line, item_list for section, source in documentations.items(): yaml_file =", "type_ == \"object\": if \"additionalProperties\" in properties: add_prop = properties[\"additionalProperties\"]", "example_ = item[\"schema\"][\"default\"] if type_ == \"array\": array_obj = item[\"schema\"][\"items\"]", "ref: request_object_ = 
request_object_[path] if \"properties\" in request_object_: request_object_properties_ =", "if \"enum\" in properties: type_ += \" Enum\" description_ +=", "example writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i += 1 return writeUp def", "= ref[-1] + \" Array\" request_object_ = doc for path", "requests in the following format:\\n' html_file.write(\"<p>\\n\" + description_ + \"</p>\\n\")", "example_ = item[\"schema\"][\"maximum\"] elif \"default\" in item[\"schema\"]: description_ += f'", "properties[\"items\"][\"type\"] + \" Array\" example_ += tab + f' \"{properties[\"items\"][\"type\"]}\"'", "\"boolean\" in type_: example_ = 'true' writeUp += f'\\n<tr>\\n<td width=\"20%\">{item[\"name\"]}</td>", "__, __ = ExampleWriting(request_object_properties_, [], 1) if \"type\" in array_obj:", "= result[\"post\"] if \"post\" in result else result[\"get\"] # Create", "= '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n[\\n [' writeUp += '<table class=\"table", "description_ += f' Options: {str(array_obj[\"enum\"])}' example_ = f'\"{array_obj[\"enum\"][0]}\"' if \"Enum\"", "html_file.write('<h4>Default Generic Error</h4>\\n') writeUp = ResponseTable(properties) html_file.write(writeUp) print(f\"Documentation of {section}", "item_list, order=order+2) example_ += tab + \" \" * 2", "/ f'{j:02} Request.html', \"w\") as html_file: description_ = \"\" if", "source in documentations.items(): yaml_file = open(source) doc = yaml.load(yaml_file, Loader=yaml.Loader)", "in request_object_: request_object_properties_ = request_object_[\"properties\"] example_, __, __ = ExampleWriting(request_object_properties_,", "order=order+1) example_ += write_up elif \"type\" in request_object_: properties =", "content: writeUp = RequestTable(api_call, content[\"parameters\"]) elif \"requestBody\" in content: if", "\"integer\" in type_: example_ = '0' elif \"boolean\" in type_:", "eg = '\"' + eg + '\"' example_ = tab", "Options: \" + 
str(request_object[\"enum\"]) description_ = request_object[\"description\"] if description_[-1] !=", "Authentication Error</h4>\\n<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n') html_file.write('<th colspan=\"2\"><code>UnauthorizedError</code> Model - Unauthorized response", "\"integer\" or type_ == \"number\": example_ += \"0\" elif type_", "writeUp += f'<th colspan=\"1\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>\\n' writeUp += f'</tr>\\n<td><code>{api_call}</code> method takes", "if \"example\" in request_object: text = request_object[\"example\"] elif \"enum\" in", "\"description\" in content: with open(destination_folder / f'{j:02} Description.html', \"w\") as", "in documentations.items(): yaml_file = open(source) doc = yaml.load(yaml_file, Loader=yaml.Loader) paths", "elif \"$ref\" in properties[\"items\"]: ref = properties[\"items\"][\"$ref\"].split(\"/\")[1:] type_ = ref[-1]", "item_list.append(request_object[\"content\"][\"application/json\"][\"schema\"][\"$ref\"].split(\"/\")[1:]) i += 1 continue elif \"type\" in request_object and", "array: line.append(f'<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{type_}</code> <br/> {description_}</td>\\n</tr>\\n') example += example_", "j += 1 # Create Request part with open(destination_folder /", "if type_ == \"array\": example_ += '[\\n' if \"type\" in", "example += '\\b\\n ]\\n]' writeUp += example writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>'", "= \" \" * order if array: example = \"[\\n", "+= '</tr>\\n</thead>\\n' example, html_property, item_list = ExampleWriting(request_object_properties, item_list, array, order)", "+= f' \"{item[\"name\"]}\": {example_},\\n' return writeUp + example + \"\\b}</pre>\\n</div>\\n</td>\\n</tr>\\n</table>\"", "'\"' example += f'\\n {text},' example += '\\b\\n ]\\n]' writeUp", "section-example-container\"><pre>\\n[\\n [' writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th", "component[\"$ref\"].split(\"/\")[1:] elif 
\"items\" in component and \"$ref\" in component[\"items\"]: component", "width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n{\\n' for item in params: example_ = \"/\"", "== \"date-time\": example_ += \"2021-11-26T15:18:27.693Z\" else: example_ += \"0\" else:", "+= f'<th colspan=\"2\">{requestBody[\"description\"]}</th>\\n' writeUp += '</tr>\\n</thead>\\n' writeUp += f'<tr>\\n<td width=\"20%\">value</td>", "request_object = doc for item in item_list[i]: request_object = request_object[item]", "# Create Request part with open(destination_folder / f'{j:02} Request.html', \"w\")", "+= f'\\n<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{request_object[\"type\"]}</code> <br/> {description_ + enum}</td>\\n</tr>\\n' if", "\"date-time\": example_ += \"2021-11-26T15:18:27.693Z\" else: example_ += \"0\" else: type_", "in request_object: request_object_properties = request_object[\"properties\"] elif \"content\" in request_object: item_list.append(request_object[\"content\"][\"application/json\"][\"schema\"][\"$ref\"].split(\"/\")[1:])", "= prop_type + \" object\" example_ += f'\"{prop_type}\"' elif \"$ref\"", "= ref[-1] + \" object\" if ref not in item_list:", "+= f'<tr>\\n<td width=\"20%\">value</td> <td> <code>{component[\"items\"][\"type\"]}</code> <br/>/</td>\\n</tr>\\n' writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div", "type_ + \" Enum\" description_ += f' Options: {str(array_obj[\"enum\"])}' example_", "return writeUp else: component = requestBody[\"$ref\"].split(\"/\")[1:] item_list = [component] i", "for code, properties in request_body.items(): if code == \"200\": html_file.write('<h4>200", "\"/\" elif type_ == \"integer\" or type_ == \"number\": example_", "yaml documentations = {\"Our Platform\": \"QuantConnect-Platform-2.0.0.yaml\", \"Alpha Streams\": \"QuantConnect-Alpha-0.8.yaml\"} def", "html_file.write(f'{content[\"description\"]}\\n') html_file.write('</p>\\n') j += 1 # Create Request part with", "i += 
1 continue elif \"properties\" in request_object: request_object_properties =", "in type_: example_ += \"\\n\" + tab + \" ]\"", "description_ += f' Minimum: {item[\"schema\"][\"minimum\"]}' example_ = item[\"schema\"][\"minimum\"] elif \"maximum\"", "colspan=\"2\"><code>{item}</code> Model - {request_object[\"description\"]}</th>\\n' writeUp += '</tr>\\n</thead>' for y in", "\"object\": if \"additionalProperties\" in properties: add_prop = properties[\"additionalProperties\"] if \"type\"", "if \"type\" in properties else \"object\" description_ = properties[\"description\"] if", "not exist destination_folder = pathlib.Path(\"/\".join(content[\"tags\"])) destination_folder.mkdir(parents=True, exist_ok=True) # Create Introduction", "if type_ == \"array\": array_obj = item[\"schema\"][\"items\"] if \"$ref\" in", "if \"description\" in request_object: writeUp += f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model -", "False order -= 1 for line in html_property: writeUp +=", "order=order+2) example_ += tab + \" \" * 2 +", "ExampleWriting(request_object_properties_, [], 1) if \"type\" in array_obj: type_ = array_obj[\"type\"]", "\" if \"required\" not in item or not item[\"required\"] else", "example_ += tab + f' \"{properties[\"items\"][\"type\"]}\"' elif \"$ref\" in properties[\"items\"]:", "example_ += \"0\" elif type_ == \"boolean\": example_ += \"true\"", "\"$ref\" in properties[\"items\"]: ref = properties[\"items\"][\"$ref\"].split(\"/\")[1:] type_ = ref[-1] +", "else: type_ = prop_type + \" object\" example_ += f'\"{prop_type}\"'", "[] for name, properties in request_object_properties.items(): type_ = properties[\"type\"] if", "in properties[\"items\"]: ref = properties[\"items\"][\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \"", "component = component[\"items\"][\"$ref\"].split(\"/\")[1:] array = True order += 1 else:", "request_object_ else \"/\" elif type_ == \"integer\" or type_ ==", "width=\"20%\">value</td> <td> 
<code>{component[\"items\"][\"type\"]}</code> <br/>/</td>\\n</tr>\\n' writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n'", "= \"{\\n\" line = [] for name, properties in request_object_properties.items():", "!= \"{\\n\" and not array) or (example != \"[\\n {\\n\"", "request_object[\"oneOf\"]: item_list.append(y[\"$ref\"].split(\"/\")[1:]) i += 1 continue elif \"properties\" in request_object:", "text = '\"' + request_object[\"enum\"][0] + '\"' example += f'\\n", "description_ += f' Maximum: {item[\"schema\"][\"maximum\"]}' example_ = item[\"schema\"][\"maximum\"] elif \"default\"", "= properties[\"additionalProperties\"] if \"type\" in add_prop: prop_type = add_prop[\"type\"] if", "= ExampleWriting(request_object_properties_, item_list, order=order+1) example_ += write_up elif \"type\" in", "writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' return writeUp else: component = requestBody[\"$ref\"].split(\"/\")[1:] item_list", "example_ += \"2021-11-26T15:18:27.693Z\" else: example_ += \"0\" else: type_ =", "f' Minimum: {item[\"schema\"][\"minimum\"]}' example_ = item[\"schema\"][\"minimum\"] elif \"maximum\" in item[\"schema\"]:", "request_object[\"description\"] if description_[-1] != \".\": description_ += \".\" writeUp +=", "\"{\\n\" and not array) or (example != \"[\\n {\\n\" and", "'\"' + request_object[\"enum\"][0] + '\"' example += f'\\n {text},' example", "writeUp = ResponseTable(content[\"requestBody\"]) else: writeUp = '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp", "+= 1 continue elif \"type\" in request_object and \"properties\" not", "missing, invalid, or timestamp is too old for hash.</th>\\n') html_file.write('</tr>\\n</thead>\\n<tr>\\n<td", "\"w\") as html_file: html_file.write('<p>\\n') html_file.write(f'{content[\"description\"]}\\n') html_file.write('</p>\\n') j += 1 #", "in request_object: enum = \" Options: \" + str(request_object[\"enum\"]) description_", 
"open(destination_folder / f'{j:02} Description.html', \"w\") as html_file: html_file.write('<p>\\n') html_file.write(f'{content[\"description\"]}\\n') html_file.write('</p>\\n')", "= content[\"responses\"] for code, properties in request_body.items(): if code ==", "elif \"$ref\" in add_prop: ref = add_prop[\"$ref\"].split(\"/\")[1:] type_ = ref[-1]", "type_: example_ = 'true' writeUp += f'\\n<tr>\\n<td width=\"20%\">{item[\"name\"]}</td> <td> <code>{type_}</code><br/>{description_}</td>\\n</tr>'", "add_prop[\"type\"] if \"format\" in prop_type: type_ = prop_type + f'$({prop_type[\"format\"]})'", "def ResponseTable(requestBody): writeUp = \"\" array = False order =", "hash.</th>\\n') html_file.write('</tr>\\n</thead>\\n<tr>\\n<td width=\"20%\">www_authenticate</td> <td> <code>string</code> <br/> Header</td>\\n</tr>\\n</table>\\n') continue elif code", "= {item: request_object} writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' if \"description\"", "'\\b\\n ]\\n]' writeUp += example writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i +=", "item[\"required\"] else \"\" description_ += item[\"description\"] if description_[-1] != \".\":", "and array): example += \",\\n\" example_ = tab + f'", "section-example-container\"><pre>\\n' writeUp += f'[\\n \"{component[\"items\"][\"example\"]}\"\\n]' writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' return writeUp", "destination_folder = pathlib.Path(\"/\".join(content[\"tags\"])) destination_folder.mkdir(parents=True, exist_ok=True) # Create Introduction part with", "in result else result[\"get\"] # Create path if not exist", "= request_object[\"example\"] elif \"enum\" in request_object: text = '\"' +", "= request_object_[path] if \"properties\" in request_object_: request_object_properties_ = request_object_[\"properties\"] example_,", "+ \" Array\" ref = array_obj[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] +", "+= '\\b\\n ]\\n]' writeUp += example writeUp += 
'</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i", "\"parameters\" in content: writeUp = RequestTable(api_call, content[\"parameters\"]) elif \"requestBody\" in", "+ \" \" * 2 + write_up elif type_ ==", "example_ = '\"string\"' elif \"number\" in type_ or \"integer\" in", "class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"1\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>\\n' writeUp += f'</tr>\\n<td><code>{api_call}</code>", "= str(content[\"requestBody\"][\"description\"]) if description_[-1] != \".\": description_ += \".\" description_", "eg + '\"' example_ = tab + f' \"{name}\": {eg}'", "description_ += \".\" if \"type\" in item[\"schema\"]: type_ = item[\"schema\"][\"type\"]", "\"{properties[\"enum\"][0]}\"' else: example_ = tab + f' \"{name}\": {properties[\"enum\"][0]}' if", "+= '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"2\"><code>{item}</code> Model -", "add_prop[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" object\" if ref not", "example += f'\\n {text},' example += '\\b\\n ]\\n]' writeUp +=", "= open(source) doc = yaml.load(yaml_file, Loader=yaml.Loader) paths = doc[\"paths\"] for", "if \"description\" in content: with open(destination_folder / f'{j:02} Description.html', \"w\")", "Request part with open(destination_folder / f'{j:02} Request.html', \"w\") as html_file:", "item[\"schema\"][\"$ref\"].split(\"/\")[-1] if \"minimum\" in item[\"schema\"]: description_ += f' Minimum: {item[\"schema\"][\"minimum\"]}'", "-= 1 for line in html_property: writeUp += line writeUp", "while i < len(item_list): request_object = doc for item in", "= add_prop[\"type\"] if \"format\" in prop_type: type_ = prop_type +", "order == 0 or array: line.append(f'<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{type_}</code> <br/>", "1 continue elif \"properties\" in request_object: request_object_properties = request_object[\"properties\"] elif", "in request_object: request_object_properties = {item: 
request_object} writeUp += '<table class=\"table", "request_object_ = doc for item in ref: request_object_ = request_object_[item]", "ExampleWriting(request_object_properties, item_list, array, order) if array: array = False order", "f'{j:02} Introduction.html', \"w\") as html_file: html_file.write(\"<p>\\n\") html_file.write(f\"{content['summary']}\\n\") html_file.write(\"</p>\\n\") j +=", "in request_object_: properties = request_object_properties_ = request_object_ type_ = request_object_[\"type\"]", "\"object\" description_ = properties[\"description\"] if \"description\" in properties else \"/\"", "+= \".\" if \"enum\" in properties: type_ += \" Enum\"", "if \"properties\" in request_object_: request_object_properties_ = request_object_[\"properties\"] write_up, __, item_list", "qc-table\">\\n<thead>\\n<tr>\\n') html_file.write('<th colspan=\"2\"><code>UnauthorizedError</code> Model - Unauthorized response from the API.", "+ \"\\n\" + tab + \"}\\n\" + \" \" *", "code == \"404\": html_file.write('<h4>404 Not Found Error</h4>\\n') html_file.write('<p>The requested item,", "the following format:\\n' html_file.write(\"<p>\\n\" + description_ + \"</p>\\n\") html_file.write(writeUp) j", "+ tab + \" ]\" if order == 0 or", "request_object = request_object[item] if \"items\" in request_object and \"oneOf\" in", "properties in request_body.items(): if code == \"200\": html_file.write('<h4>200 Success</h4>\\n') elif", "request_object: enum = \" Options: \" + str(request_object[\"enum\"]) description_ =", "Create Introduction part with open(destination_folder / f'{j:02} Introduction.html', \"w\") as", "if not array: return example + \"\\n\" + tab +", "= properties[\"items\"][\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" Array\" if ref", "+ \"]\", line, item_list for section, source in documentations.items(): yaml_file", "\"{name}\": \"{properties[\"enum\"][0]}\"' else: example_ = tab + f' \"{name}\": {properties[\"enum\"][0]}'", "html_file.write(f'The 
<code>{api_call}</code> API provides a response in the following format:\\n')", "+= f' Minimum: {item[\"schema\"][\"minimum\"]}' example_ = item[\"schema\"][\"minimum\"] elif \"maximum\" in", "+= f'(${properties[\"format\"]})' example_ += \"2021-11-26T15:18:27.693Z\" else: example_ += '\"string\"' if", "== \"number\": example_ += \"0\" elif type_ == \"boolean\": example_", "for api_call, result in paths.items(): j = 1 content =", "type_ = properties[\"items\"][\"type\"] + \" Array\" example_ += tab +", "in properties[\"items\"]: type_ = properties[\"items\"][\"type\"] + \" Array\" example_ +=", "having one if \"description\" in content: with open(destination_folder / f'{j:02}", "{eg}</sub></i>' if isinstance(eg, str): eg = '\"' + eg +", "properties: ref = properties[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" object\"", "response in the following format:\\n') html_file.write('</p>\\n') request_body = content[\"responses\"] for", "properties else \"/\" if (example != \"{\\n\" and not array)", "= request_object_[\"properties\"] write_up, __, item_list = ExampleWriting(request_object_properties_, item_list, order=order+2) example_", "item in item_list[i]: request_object = request_object[item] if \"items\" in request_object", "writeUp += line writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n' writeUp", "for y in request_object[\"oneOf\"]: item_list.append(y[\"$ref\"].split(\"/\")[1:]) i += 1 continue elif", "if \"description\" in request_object_ else \"/\" elif type_ == \"integer\"", "html_file.write(f\"{content['summary']}\\n\") html_file.write(\"</p>\\n\") j += 1 # Create Description part if", "request_object: request_object_properties = request_object[\"properties\"] elif \"content\" in request_object: item_list.append(request_object[\"content\"][\"application/json\"][\"schema\"][\"$ref\"].split(\"/\")[1:]) i", "= request_object_[item] if \"properties\" in request_object_: request_object_properties_ 
= request_object_[\"properties\"] description_", "description_ + \"</p>\\n\") html_file.write(writeUp) j += 1 # Create Response", "line writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n' writeUp += example", "\"QuantConnect-Alpha-0.8.yaml\"} def RequestTable(api_call, params): writeUp = '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp", "+= 1 # Create Request part with open(destination_folder / f'{j:02}", "\"$ref\" in array_obj: type_ = array_obj[\"$ref\"].split(\"/\")[-1] + \" Array\" ref", "request_object_properties_ = request_object_ type_ = request_object_[\"type\"] description_ = request_object_[\"description\"] if", "f' \"{name}\": {eg}' if \"Array\" in type_: example_ += \"\\n\"", "= ExampleWriting(request_object_properties_, [], 1) if \"type\" in array_obj: type_ =", "= request_object_[\"description\"] if \"description\" in request_object_ else \"/\" elif type_", "\"0\" elif type_ == \"boolean\": example_ += \"true\" elif type_", "if description_[-1] != \".\": description_ += \".\" description_ += \"", "\"type\" in add_prop: prop_type = add_prop[\"type\"] if \"format\" in prop_type:", "\"\\b}</pre>\\n</div>\\n</td>\\n</tr>\\n</table>\" def ResponseTable(requestBody): writeUp = \"\" array = False order", "code == \"default\": html_file.write('<h4>Default Generic Error</h4>\\n') writeUp = ResponseTable(properties) html_file.write(writeUp)", "Create Request part with open(destination_folder / f'{j:02} Request.html', \"w\") as", "= '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n{\\n' for item in params: example_", "f'\"{prop_type}\"' elif \"$ref\" in add_prop: ref = add_prop[\"$ref\"].split(\"/\")[1:] type_ =", "<td> <code>{type_}</code><br/>{description_}</td>\\n</tr>' example += f' \"{item[\"name\"]}\": {example_},\\n' return writeUp +", "+= \" Enum\" description_ += f' Options : {properties[\"enum\"]}' if", 
"requestBody[\"content\"][\"application/json\"][\"schema\"] if \"$ref\" in component: component = component[\"$ref\"].split(\"/\")[1:] elif \"items\"", "content[\"parameters\"]) elif \"requestBody\" in content: if \"description\" in content[\"requestBody\"]: description_", "array: example = \"[\\n {\\n\" else: example = \"{\\n\" line", "in item[\"schema\"]: description_ += f' Maximum: {item[\"schema\"][\"maximum\"]}' example_ = item[\"schema\"][\"maximum\"]", "request_body = content[\"responses\"] for code, properties in request_body.items(): if code", "line in html_property: writeUp += line writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div", "description_ += f' Options : {properties[\"enum\"]}' if \"string\" in type_:", "'</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i += 1 continue elif \"oneOf\" in request_object: for", "+ f' \"{name}\": \"{properties[\"enum\"][0]}\"' else: example_ = tab + f'", "Create path if not exist destination_folder = pathlib.Path(\"/\".join(content[\"tags\"])) destination_folder.mkdir(parents=True, exist_ok=True)", "f' \"{name}\": ' if type_ == \"array\": example_ += '[\\n'", "1 for line in html_property: writeUp += line writeUp +=", "\" object\" if ref not in item_list: item_list.append(ref) request_object_ =", "tab + f' \"{name}\": {eg}' if \"Array\" in type_: example_", "writeUp += f'\\n<tr>\\n<td width=\"20%\">{name}</td> <td> <code>{request_object[\"type\"]}</code> <br/> {description_ + enum}</td>\\n</tr>\\n'", "\"properties\" in request_object: request_object_properties = request_object[\"properties\"] elif \"content\" in request_object:", "+ \"\\n\" + tab + \"}\", line, item_list return example", "as html_file: description_ = \"\" if \"parameters\" in content: writeUp", "{str(array_obj[\"enum\"])}' example_ = f'\"{array_obj[\"enum\"][0]}\"' if \"Enum\" not in type_: if", "elif \"$ref\" in properties: ref = properties[\"$ref\"].split(\"/\")[1:] type_ = ref[-1]", "{example_},\\n' return writeUp + example + 
\"\\b}</pre>\\n</div>\\n</td>\\n</tr>\\n</table>\" def ResponseTable(requestBody): writeUp", "f'<br/><i><sub>example: {eg}</sub></i>' if isinstance(eg, str): eg = '\"' + eg", "if description_[-1] != \".\": description_ += \".\" if \"enum\" in", "and \"properties\" not in request_object: request_object_properties = {item: request_object} writeUp", "timestamp is too old for hash.</th>\\n') html_file.write('</tr>\\n</thead>\\n<tr>\\n<td width=\"20%\">www_authenticate</td> <td> <code>string</code>", "width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n' writeUp += f'[\\n \"{component[\"items\"][\"example\"]}\"\\n]' writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>'", "\"$ref\" in component[\"items\"]: component = component[\"items\"][\"$ref\"].split(\"/\")[1:] array = True order", "'</tr>\\n</thead>' for y in prop: path = y[\"$ref\"].split(\"/\")[1:] name =", "f' \"{item[\"name\"]}\": {example_},\\n' return writeUp + example + \"\\b}</pre>\\n</div>\\n</td>\\n</tr>\\n</table>\" def", "'<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n' writeUp += f'<th colspan=\"1\"><code>{api_call}</code> Method</th>\\n</tr>\\n</thead>\\n' writeUp +=", "'<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n' writeUp += example writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>'", "if \"description\" in properties else \"/\" if (example != \"{\\n\"", "+= f'\"{prop_type}\"' elif \"$ref\" in add_prop: ref = add_prop[\"$ref\"].split(\"/\")[1:] type_", "if \"format\" in prop_type: type_ = prop_type + f'$({prop_type[\"format\"]})' +", "writeUp else: component = requestBody[\"$ref\"].split(\"/\")[1:] item_list = [component] i =", "doc = yaml.load(yaml_file, Loader=yaml.Loader) paths = doc[\"paths\"] for api_call, result", "tab + \"}\\n\" + \" \" * (order-1) + \"]\",", "write_up elif \"$ref\" in properties: ref = properties[\"$ref\"].split(\"/\")[1:] type_ =", "item_list[i]: request_object = 
request_object[item] if \"items\" in request_object and \"oneOf\"", "for path in ref: request_object_ = request_object_[path] if \"properties\" in", "properties[\"items\"]: ref = properties[\"items\"][\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" Array\"", "pathlib.Path(\"/\".join(content[\"tags\"])) destination_folder.mkdir(parents=True, exist_ok=True) # Create Introduction part with open(destination_folder /", "example += example_ if not array: return example + \"\\n\"", "example writeUp += '</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' i += 1 continue elif \"oneOf\"", "description_ += f' Default: {item[\"schema\"][\"default\"]}' example_ = item[\"schema\"][\"default\"] if type_", "= request_object_properties_ = request_object_ type_ = request_object_[\"type\"] description_ = request_object_[\"description\"]", "<br/> {description_ + enum}</td>\\n</tr>\\n' if \"example\" in request_object: text =", "return example + \"\\n\" + tab + \"}\", line, item_list", "too old for hash.</th>\\n') html_file.write('</tr>\\n</thead>\\n<tr>\\n<td width=\"20%\">www_authenticate</td> <td> <code>string</code> <br/> Header</td>\\n</tr>\\n</table>\\n')", "1 return writeUp def ExampleWriting(request_object_properties, item_list, array=False, order=0): tab =", "\"array\": example_ += '[\\n' if \"type\" in properties[\"items\"]: type_ =", "'[\\n' if \"type\" in properties[\"items\"]: type_ = properties[\"items\"][\"type\"] + \"", "request_object_properties_ = request_object_[\"properties\"] description_ = request_object_[\"description\"] if \"description\" in request_object_", "{item[\"schema\"][\"default\"]}' example_ = item[\"schema\"][\"default\"] if type_ == \"array\": array_obj =", ": {properties[\"enum\"]}' if \"string\" in type_: example_ = tab +", "request_object: text = request_object[\"example\"] elif \"enum\" in request_object: text =", "request_body.items(): if code == \"200\": html_file.write('<h4>200 Success</h4>\\n') elif code ==", "description_ = 
request_object[\"description\"] if description_[-1] != \".\": description_ += \".\"", "f'<th colspan=\"2\">{requestBody[\"description\"]}</th>\\n' writeUp += '</tr>\\n</thead>\\n' writeUp += f'<tr>\\n<td width=\"20%\">value</td> <td>", "True order += 1 else: writeUp += '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n'", "in properties else \"object\" description_ = properties[\"description\"] if \"description\" in", "prop_type + \" object\" example_ += f'\"{prop_type}\"' elif \"$ref\" in", "item_list = ExampleWriting(request_object_properties_, item_list, order=order+1) example_ += write_up elif \"$ref\"", "type_: example_ = '\"string\"' elif \"number\" in type_ or \"integer\"", "section, source in documentations.items(): yaml_file = open(source) doc = yaml.load(yaml_file,", "'</pre>\\n</div>\\n</td>\\n</tr>\\n</table>' return writeUp else: component = requestBody[\"$ref\"].split(\"/\")[1:] item_list = [component]", "\"type\" in array_obj: type_ = array_obj[\"type\"] + \" Array\" if", "= add_prop[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" object\" if ref", "if \"additionalProperties\" in properties: add_prop = properties[\"additionalProperties\"] if \"type\" in", "\"minimum\" in item[\"schema\"]: description_ += f' Minimum: {item[\"schema\"][\"minimum\"]}' example_ =", "pathlib import yaml documentations = {\"Our Platform\": \"QuantConnect-Platform-2.0.0.yaml\", \"Alpha Streams\":", "in request_object[\"oneOf\"]: item_list.append(y[\"$ref\"].split(\"/\")[1:]) i += 1 continue elif \"properties\" in", "request_object_[\"properties\"] write_up, __, item_list = ExampleWriting(request_object_properties_, item_list, order=order+2) example_ +=", "return example + \"\\n\" + tab + \"}\\n\" + \"", "part with open(destination_folder / f'{j:02} Introduction.html', \"w\") as html_file: html_file.write(\"<p>\\n\")", "\" writeUp = ResponseTable(content[\"requestBody\"]) else: writeUp = '<table class=\"table qc-table\">\\n<thead>\\n<tr>\\n'", "f' Default: 
{item[\"schema\"][\"default\"]}' example_ = item[\"schema\"][\"default\"] if type_ == \"array\":", "# Create Response part with open(destination_folder / f'{j:02} Responses.html', \"w\")", "if \"$ref\" in component: component = component[\"$ref\"].split(\"/\")[1:] elif \"items\" in", "else \"/\" write_up, __, item_list = ExampleWriting(request_object_properties_, item_list, order=order+1) example_", "{\\n\" else: example = \"{\\n\" line = [] for name,", "type_ = request_object_[\"type\"] description_ = request_object_[\"description\"] if \"description\" in request_object_", "writeUp += '<tr>\\n<td width=\"20%\">Example</td>\\n<td>\\n<div class=\"cli section-example-container\"><pre>\\n' writeUp += f'[\\n \"{component[\"items\"][\"example\"]}\"\\n]'", "array: return example + \"\\n\" + tab + \"}\", line,", "request_object_[\"description\"] if \"description\" in request_object_ else \"/\" write_up, __, item_list", "content[\"requestBody\"]: description_ = str(content[\"requestBody\"][\"description\"]) if description_[-1] != \".\": description_ +=", "if \"type\" in add_prop: prop_type = add_prop[\"type\"] if \"format\" in", "\"/\" write_up, __, item_list = ExampleWriting(request_object_properties_, item_list, order=order+1) example_ +=", "f'<th colspan=\"2\"><code>{item_list[i][-1]}</code> Model - {request_object[\"description\"]}</th>\\n' else: writeUp += f'<th colspan=\"2\"><code>{item_list[i][-1]}</code>", "if \"items\" in request_object and \"oneOf\" in request_object[\"items\"]: prop =", "__, item_list = ExampleWriting(request_object_properties_, item_list, order=order+1) example_ += write_up elif", "type_ == \"string\": if \"format\" in properties: type_ += f'(${properties[\"format\"]})'", "elif \"properties\" in request_object: request_object_properties = request_object[\"properties\"] elif \"content\" in", "= y[\"$ref\"].split(\"/\")[1:] name = path[-1] enum = \"\" item_list.append(path) request_object", "content: with open(destination_folder / f'{j:02} 
Description.html', \"w\") as html_file: html_file.write('<p>\\n')", "\"default\": html_file.write('<h4>Default Generic Error</h4>\\n') writeUp = ResponseTable(properties) html_file.write(writeUp) print(f\"Documentation of", "add_prop: ref = add_prop[\"$ref\"].split(\"/\")[1:] type_ = ref[-1] + \" object\"" ]
[ "authorized class Hubs: @authorized def getHubs(self): url = self.api_url +", "class Hubs: @authorized def getHubs(self): url = self.api_url + '/project/v1/hubs'", "'/project/v1/hubs/%s' % hub_id headers = { 'Authorization': '%s %s' %", "import get_request, authorized class Hubs: @authorized def getHubs(self): url =", "%s' % (self.token_type, self.access_token) } return get_request(url, headers) @authorized def", "get_request(url, headers) @authorized def getHub(self, hub_id): url = self.api_url +", "@authorized def getHub(self, hub_id): url = self.api_url + '/project/v1/hubs/%s' %", "% (self.token_type, self.access_token) } return get_request(url, headers) @authorized def getHub(self,", "'Authorization': '%s %s' % (self.token_type, self.access_token) } return get_request(url, headers)", "getHub(self, hub_id): url = self.api_url + '/project/v1/hubs/%s' % hub_id headers", "self.access_token) } return get_request(url, headers) @authorized def getHub(self, hub_id): url", "self.api_url + '/project/v1/hubs/%s' % hub_id headers = { 'Authorization': '%s", "from .utils import get_request, authorized class Hubs: @authorized def getHubs(self):", "} return get_request(url, headers) @authorized def getHub(self, hub_id): url =", ".utils import get_request, authorized class Hubs: @authorized def getHubs(self): url", "headers = { 'Authorization': '%s %s' % (self.token_type, self.access_token) }", "return get_request(url, headers) @authorized def getHub(self, hub_id): url = self.api_url", "+ '/project/v1/hubs/%s' % hub_id headers = { 'Authorization': '%s %s'", "{ 'Authorization': '%s %s' % (self.token_type, self.access_token) } return get_request(url,", "'%s %s' % (self.token_type, self.access_token) } return get_request(url, headers) @authorized", "url = self.api_url + '/project/v1/hubs/%s' % hub_id headers = {", "= self.api_url + '/project/v1/hubs' headers = { 'Authorization': '%s %s'", "'/project/v1/hubs' headers = { 'Authorization': '%s %s' % (self.token_type, 
self.access_token)", "getHubs(self): url = self.api_url + '/project/v1/hubs' headers = { 'Authorization':", "def getHub(self, hub_id): url = self.api_url + '/project/v1/hubs/%s' % hub_id", "@authorized def getHubs(self): url = self.api_url + '/project/v1/hubs' headers =", "% hub_id headers = { 'Authorization': '%s %s' % (self.token_type,", "def getHubs(self): url = self.api_url + '/project/v1/hubs' headers = {", "url = self.api_url + '/project/v1/hubs' headers = { 'Authorization': '%s", "headers) @authorized def getHub(self, hub_id): url = self.api_url + '/project/v1/hubs/%s'", "hub_id): url = self.api_url + '/project/v1/hubs/%s' % hub_id headers =", "= self.api_url + '/project/v1/hubs/%s' % hub_id headers = { 'Authorization':", "(self.token_type, self.access_token) } return get_request(url, headers) @authorized def getHub(self, hub_id):", "hub_id headers = { 'Authorization': '%s %s' % (self.token_type, self.access_token)", "+ '/project/v1/hubs' headers = { 'Authorization': '%s %s' % (self.token_type,", "get_request, authorized class Hubs: @authorized def getHubs(self): url = self.api_url", "Hubs: @authorized def getHubs(self): url = self.api_url + '/project/v1/hubs' headers", "self.api_url + '/project/v1/hubs' headers = { 'Authorization': '%s %s' %", "= { 'Authorization': '%s %s' % (self.token_type, self.access_token) } return" ]
[ "from .views import BaseIndexView urlpatterns = [ # url(r'^$', BaseIndexView.as_view(),", "url # from .views import BaseIndexView urlpatterns = [ #", "from django.conf.urls import url # from .views import BaseIndexView urlpatterns", "import BaseIndexView urlpatterns = [ # url(r'^$', BaseIndexView.as_view(), name=\"index\"), ]", "# from .views import BaseIndexView urlpatterns = [ # url(r'^$',", ".views import BaseIndexView urlpatterns = [ # url(r'^$', BaseIndexView.as_view(), name=\"index\"),", "django.conf.urls import url # from .views import BaseIndexView urlpatterns =", "import url # from .views import BaseIndexView urlpatterns = [" ]
[ "''' Palo Alto Networks create_loadable_configs.py Provides rendering of configuration templates", "path for the template file :param render_type: type if full", "type if full or set commands; aligns with folder name", "administrator account password: \") password2 = getpass.getpass(\"Enter password again to", "import md5_crypt from passlib.hash import sha256_crypt from passlib.hash import sha512_crypt", "= 'iron_skillet_{0}_full.conf'.format(config_type) element = template_render(filename, template_path, render_type, context) template_save(filename, myconfig_path,", "NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR", "with folder name :param context: dict of variables to render", "def des_hash(txt): ''' Returns the DES Hashed secret for use", "subdir located in loadable_configs dir myconfig_path = myconfig_newdir(input_var['output_dir'], input_var['archive_time']) #", "conf files print('\\nworking with {0} config template'.format(render_type)) if render_type ==", "and configuration information. 
Suitable to place in the phash field", "for the superuser password to create a phash and store", "def create_context(config_var_file): # read the metafile to get variables and", "import datetime import os import shutil import sys import time", "AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE", "output configs with jinja2 inputs are read from the template", "password2: input_var['ADMINISTRATOR_PASSWORD'] = <PASSWORD> passwordmatch = <PASSWORD> else: print('\\nPasswords do", "{} # archive_time used as part of the my_config directory", "''' Returns the SHA256 Hashed secret for use as a", "place in the phash field in the configurations ''' return", "again.\\n') # loop through all config types that have their", "found in {0}'.format(myconfig_path)) print('along with the metadata values used to", "a new one # then create snippets and full sub-directories", "to place in the phash field in the configurations '''", "time import getpass import oyaml from jinja2 import Environment, FileSystemLoader", "of folder name :return: the myconfigdir full path name '''", "rendering the template save to the myconfig directory each run", "the SHA256 Hashed secret for use as a password hash", "the configurations ''' return sha256_crypt.hash(txt) def sha512_hash(txt): ''' Returns the", "phash field in the configurations ''' return sha256_crypt.hash(txt) def sha512_hash(txt):", "files print('\\nworking with {0} config template'.format(render_type)) if render_type == 'full':", "filters here, see the function defs below for reference env.filters['md5_hash']", "name from the my_variables.py file :param foldertime: datetime when script", "value returned (future could be success code) ''' print('..saving template", ":return: password hash of the string with salt and configuration", "template_path, render_type, context) template_save(filename, myconfig_path, config_type, element) print('\\nconfigs have been", ":param filename: name of the template file :param template_path: path", 
"myconfig_path, config_type, element) print('\\nconfigs have been created and can be", "= '{0}/{1}-{2}'.format(myconfigpath, myconfigdir_name, foldertime) if os.path.isdir(myconfigdir) is False: os.mkdir(myconfigdir, mode=0o755)", "myconfigdir_name, foldertime) if os.path.isdir(myconfigdir) is False: os.mkdir(myconfigdir, mode=0o755) print('\\ncreated new", "password hash of the string with salt and configuration information.", "to create a phash and store in the my_config files;", "os.path.isdir(myconfigdir) is False: os.mkdir(myconfigdir, mode=0o755) print('\\ncreated new archive folder {0}-{1}'.format(myconfigdir_name,", "password again to verify: \") if password1 == password2: input_var['ADMINISTRATOR_PASSWORD']", "print('created new loadable config directory') # check that configs folder", "snippets; aligns with folder name :return: no value returned (future", "we want (panos / panorama) template_path = os.path.abspath(os.path.join('..', 'templates', config_type))", "username to be added into the configuration; no default admin/admin", "of configuration templates with user defined values Output is a", "username: ') print('\\na phash will be created for superuser {0}", "then run the script This software is provided without support,", "aligns with folder name :param context: dict of variables to", "DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF", "env = Environment(loader=FileSystemLoader('{0}/{1}'.format(template_path, render_type))) # load our custom jinja filters", "the myconfigdir full path name ''' # get the full", "# update context dict with variables from user input for", "folder naming ''' config_variables = 'config_variables.yaml' # create dict of", "prefix name of the output directory input_var['output_dir'] = input('Enter the", "my_config directory name input_var['archive_time'] = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H%M%S') print('\\ndatetime used for folder", "get the input variables and render the output 
configs with", "the prefix name of the output directory input_var['output_dir'] = input('Enter", "print('along with the metadata values used to render the configs\\n')", "= getpass.getpass(\"Enter password again to verify: \") if password1 ==", "values and convert to key-based dictionary jinja_context = dict() for", "config_type)) print('created new subdirectories for {0}'.format(config_type)) return myconfigdir def create_context(config_var_file):", "then create snippets and full sub-directories myconfigdir = '{0}/{1}-{2}'.format(myconfigpath, myconfigdir_name,", "ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA", "and full sub-directories myconfigdir = '{0}/{1}-{2}'.format(myconfigpath, myconfigdir_name, foldertime) if os.path.isdir(myconfigdir)", "notice and this permission notice appear in all copies. #", "suffix of folder name :return: the myconfigdir full path name", "panorama to read/write to the respective directories :param archivetime: datetimestamp", "myconfigdir full path name ''' # get the full path", "md5_hash(txt): ''' Returns the MD5 Hashed secret for use as", "template_render(filename, template_path, render_type, context) template_save(filename, myconfig_path, config_type, element) print('\\nconfigs have", "check that configs folder exists and if not create a", "= input('Enter the name of the output directory: ') #", "import os import shutil import sys import time import getpass", "create a unique folder name print('=' * 80) print(' ')", "OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE", "folder exists and if not create a new one #", "OF # MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE", "foldertime): ''' create a new main loadable_configs folder if required", "config_type)) is False: os.mkdir('{0}/{1}'.format(myconfigdir, config_type)) print('created new subdirectories for {0}'.format(config_type))", "'{0}/{1}'.format(myconfigdir, var_file) shutil.copy(vfilesrc, vfiledst) return # define functions for custom", "Hashed secret for use as a password hash in the", "while passwordmatch is False: password1 = getpass.getpass(\"Enter the superuser administrator", "in ['panos', 'panorama']: for render_type in ['full', 'set_commands']: replace_variables(config_type, render_type,", "<PASSWORD> else: print('\\nPasswords do not match. Please try again.\\n') #", "from passlib.hash import des_crypt from passlib.hash import md5_crypt from passlib.hash", "this software for any # purpose with or without fee", "in the PanOS configuration :param txt: text to be hashed", "folder name :param context: dict of variables to render :return:", "the myconfig directory each run saves with a unique prefix", "# this prompts for the prefix name of the output", "''' return sha512_crypt.hash(txt) def replace_variables(config_type, render_type, input_var): ''' get the", "return sha512_crypt.hash(txt) def replace_variables(config_type, render_type, input_var): ''' get the input", "'full': filename = 'iron_skillet_{0}_full.xml'.format(config_type) if render_type == 'set_commands': filename =", "filename = 'iron_skillet_{0}_full.xml'.format(config_type) if render_type == 'set_commands': filename = 'iron_skillet_{0}_full.conf'.format(config_type)", "jinja_context[snippet_var['name']] = snippet_var['value'] return jinja_context def template_render(filename, template_path, render_type, context):", "new loadable config directory') # check that configs folder exists", "or panorama :param element: xml element rendered based on input", "import Environment, FileSystemLoader from passlib.hash import des_crypt from passlib.hash import", "\") password2 = 
getpass.getpass(\"Enter password again to verify: \") if", "to key-based dictionary jinja_context = dict() for snippet_var in variables['variables']:", "this permission notice appear in all copies. # # THE", "own risk. ''' import datetime import os import shutil import", "that configs folder exists and if not create a new", "rendered based on input variables; used as folder name :param", "commands; aligns with folder name :param context: dict of variables", "the my_template folder var_file = 'loadable_config_vars/config_variables.yaml' if os.path.isfile('{0}/{1}'.format(myconfigdir, var_file)) is", "field in the configurations ''' return sha512_crypt.hash(txt) def replace_variables(config_type, render_type,", "the output configs with jinja2 inputs are read from the", "input('Enter the superuser administrator account username: ') print('\\na phash will", "the template file :param render_type: type if full or set", "DES Hashed secret for use as a password hash in", "for config_type in ['panos', 'panorama']: for render_type in ['full', 'set_commands']:", "superuser {0} and added to the config file\\n'.format( input_var['ADMINISTRATOR_USERNAME'])) passwordmatch", "md5_crypt.hash(txt) def des_hash(txt): ''' Returns the DES Hashed secret for", "def md5_hash(txt): ''' Returns the MD5 Hashed secret for use", "Networks create_loadable_configs.py Provides rendering of configuration templates with user defined", "is hereby granted, provided that the above # copyright notice", "''' # get the full path to the config directory", "code) ''' print('..saving template for {0}'.format(snippet_name)) filename = snippet_name with", "(panos / panorama) template_path = os.path.abspath(os.path.join('..', 'templates', config_type)) # append", "sha512_crypt defined_filters = ['md5_hash', 'des_hash', 'sha512_hash'] def myconfig_newdir(myconfigdir_name, foldertime): '''", "software is provided without support, warranty, or guarantee. 
Use at", "set conf files print('\\nworking with {0} config template'.format(render_type)) if render_type", "module lookup sys.path.append(template_path) # output subdir located in loadable_configs dir", "used as part of the my_config directory name input_var['archive_time'] =", "False: password1 = getpass.getpass(\"Enter the superuser administrator account password: \")", "sys import time import getpass import oyaml from jinja2 import", "input variables; used as folder name :param render_type: type eg.", ":param element: xml element rendered based on input variables; used", "# Use the timestamp to create a unique folder name", "config file\\n'.format( input_var['ADMINISTRATOR_USERNAME'])) passwordmatch = False # prompt for the", "on input variables; used as folder name :param render_type: type", "full or snippets; aligns with folder name :return: no value", "THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS", "WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. # Author:", "myconfig_path = myconfig_newdir(input_var['output_dir'], input_var['archive_time']) # render full and set conf", "des_crypt from passlib.hash import md5_crypt from passlib.hash import sha256_crypt from", "CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF #", "we want (panos / panorama) myconfigpath = os.path.abspath(os.path.join('..', 'loadable_configs')) if", "all config types that have their respective template folders for", "the my_config directory name input_var['archive_time'] = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H%M%S') print('\\ndatetime used for", "config_type in ['panos', 'panorama']: for render_type in ['full', 'set_commands']: replace_variables(config_type,", "80) input_var = {} # archive_time used as part of", "output file :param myconfigdir: path to the my_config directory :param", "render :return: return the rendered xml file and set conf", "account username: ') print('\\na phash will be created for superuser", "configuration information. 
Suitable to place in the phash field in", "render the jinja template using the context value from config_variables.yaml", "the my_config directory :param config_type: based on initial run list;", "element = template_render(filename, template_path, render_type, context) template_save(filename, myconfig_path, config_type, element)", "new one # then create snippets and full sub-directories myconfigdir", "will be created for superuser {0} and added to the", "input_var['ADMINISTRATOR_PASSWORD'] = <PASSWORD> passwordmatch = <PASSWORD> else: print('\\nPasswords do not", "OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM", "the full path to the output directory we want (panos", "['panos', 'panorama']: for render_type in ['full', 'set_commands']: replace_variables(config_type, render_type, input_var)", "phash and store in the my_config files; no default admin/admin", "config_variables.yaml values and then run the script This software is", "notice appear in all copies. # # THE SOFTWARE IS", "warranty, or guarantee. Use at your own risk. ''' import", "def replace_variables(config_type, render_type, input_var): ''' get the input variables and", "is False: os.mkdir(myconfigpath, mode=0o755) print('created new loadable config directory') #", "read/write to the respective directories :param archivetime: datetimestamp used for", "eg. 
if full or snippets; aligns with folder name :return:", "# get the full path to the config directory we", "''' print('..creating template for {0}'.format(filename)) env = Environment(loader=FileSystemLoader('{0}/{1}'.format(template_path, render_type))) #", "# load our custom jinja filters here, see the function", "return # define functions for custom jinja filters def md5_hash(txt):", "script run; to be used as suffix of folder name", "is False: vfilesrc = var_file vfiledst = '{0}/{1}'.format(myconfigdir, var_file) shutil.copy(vfilesrc,", "for the render into the my_template folder var_file = 'loadable_config_vars/config_variables.yaml'", "password1 = getpass.getpass(\"Enter the superuser administrator account password: \") password2", "THE USE OR PERFORMANCE OF THIS SOFTWARE. # Author: <NAME>", "element: xml element rendered based on input variables; used as", "with {0} config template'.format(render_type)) if render_type == 'full': filename =", "md5_hash env.filters['des_hash'] = des_hash env.filters['sha512_hash'] = sha512_hash template = env.get_template(filename)", "for snippet_var in variables['variables']: jinja_context[snippet_var['name']] = snippet_var['value'] return jinja_context def", "template save to the myconfig directory each run saves with", "rendered xml file and set conf file ''' print('..creating template", "ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES", "LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES", "the metadata values and convert to key-based dictionary jinja_context =", "name :param context: dict of variables to render :return: return", "path to the output directory we want (panos / panorama)", "print('created new subdirectories for {0}'.format(config_type)) return myconfigdir def create_context(config_var_file): #", "the string with salt and configuration information. 
Suitable to place", "config_variables = 'config_variables.yaml' # create dict of values for the", "# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY", "want (panos / panorama) myconfigpath = os.path.abspath(os.path.join('..', 'loadable_configs')) if os.path.isdir(myconfigpath)", "sys.path.append(template_path) # output subdir located in loadable_configs dir myconfig_path =", "use, copy, modify, and/or distribute this software for any #", "set commands for Panos and Panorama Edit the config_variables.yaml values", "variables and values try: with open(config_var_file, 'r') as var_metadata: variables", "Palo Alto Networks create_loadable_configs.py Provides rendering of configuration templates with", "based on input variables; used as folder name :param render_type:", "as a password hash in the PanOS configuration :param txt:", "input_var = {} # archive_time used as part of the", "template.render(context) return rendered_template def template_save(snippet_name, myconfigdir, config_type, element): ''' after", "getpass.getpass(\"Enter the superuser administrator account password: \") password2 = getpass.getpass(\"Enter", "return rendered_template def template_save(snippet_name, myconfigdir, config_type, element): ''' after rendering", "using the context value from config_variables.yaml :param filename: name of", "the superuser password to create a phash and store in", "without fee is hereby granted, provided that the above #", "with or without fee is hereby granted, provided that the", "'templates', config_type)) # append to the sys path for module", "see the function defs below for reference env.filters['md5_hash'] = md5_hash", "output to my_config :param config_type: panos or panorama to read/write", "import sys import time import getpass import oyaml from jinja2", "full sub-directories myconfigdir = '{0}/{1}-{2}'.format(myconfigpath, myconfigdir_name, foldertime) if os.path.isdir(myconfigdir) is", "set of loadable full configurations and set commands 
for Panos", "of the my_config directory name input_var['archive_time'] = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H%M%S') print('\\ndatetime used", "in the phash field in the configurations ''' return sha512_crypt.hash(txt)", "is a set of loadable full configurations and set commands", "based on initial run list; eg. panos or panorama :param", "lookup sys.path.append(template_path) # output subdir located in loadable_configs dir myconfig_path", "= template_render(filename, template_path, render_type, context) template_save(filename, myconfig_path, config_type, element) print('\\nconfigs", "success code) ''' print('..saving template for {0}'.format(snippet_name)) filename = snippet_name", "replace_variables(config_type, render_type, input_var): ''' get the input variables and render", "superuser password to create a phash and store in the", "Panorama Edit the config_variables.yaml values and then run the script", "context value from config_variables.yaml :param filename: name of the template", "des_hash env.filters['sha512_hash'] = sha512_hash template = env.get_template(filename) rendered_template = template.render(context)", "configfile.write(element) # copy the variables file used for the render", "template file :param template_path: path for the template file :param", "the phash field in the configurations ''' return sha256_crypt.hash(txt) def", "directory input_var['output_dir'] = input('Enter the name of the output directory:", "subdirectories for configs :param myconfigdir_name: prefix folder name from the", "CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. #", "foldertime: datetime when script run; to be used as suffix", "False: os.mkdir(myconfigdir, mode=0o755) print('\\ncreated new archive folder {0}-{1}'.format(myconfigdir_name, foldertime)) if", "Alto Networks # # Permission to use, copy, modify, and/or", "variables; used as folder name :param render_type: type eg. 
if", "DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER", "DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR", "provided that the above # copyright notice and this permission", "/ panorama) template_path = os.path.abspath(os.path.join('..', 'templates', config_type)) # append to", "part of the my_config directory name input_var['archive_time'] = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H%M%S') print('\\ndatetime", "ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO", "# THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR", "render into the my_template folder var_file = 'loadable_config_vars/config_variables.yaml' if os.path.isfile('{0}/{1}'.format(myconfigdir,", "that have their respective template folders for config_type in ['panos',", "# Permission to use, copy, modify, and/or distribute this software", "input_var['output_dir'] = input('Enter the name of the output directory: ')", "'{0}/{1}-{2}'.format(myconfigpath, myconfigdir_name, foldertime) if os.path.isdir(myconfigdir) is False: os.mkdir(myconfigdir, mode=0o755) print('\\ncreated", "= '{0}/{1}'.format(myconfigdir, var_file) shutil.copy(vfilesrc, vfiledst) return # define functions for", "been created and can be found in {0}'.format(myconfig_path)) print('along with", "modify, and/or distribute this software for any # purpose with", "for folder creation: {0}\\n'.format(input_var['archive_time'])) # this prompts for the prefix", "# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,", "appear in all copies. 
# # THE SOFTWARE IS PROVIDED", "WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED", "def sha256_hash(txt): ''' Returns the SHA256 Hashed secret for use", "or panorama to read/write to the respective directories :param archivetime:", "with user defined values Output is a set of loadable", "IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES", "jinja_context def template_render(filename, template_path, render_type, context): ''' render the jinja", "aligns with folder name :return: no value returned (future could", "{0}'.format(snippet_name)) filename = snippet_name with open('{0}/{1}/{2}'.format(myconfigdir, config_type, filename), 'w') as", "''' return sha256_crypt.hash(txt) def sha512_hash(txt): ''' Returns the SHA512 Hashed", "config directory we want (panos / panorama) myconfigpath = os.path.abspath(os.path.join('..',", "the configuration; no default admin/admin used input_var['ADMINISTRATOR_USERNAME'] = input('Enter the", "name :param render_type: type eg. if full or snippets; aligns", "file :param foldertime: datetime when script run; to be used", "# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR", "name + datetime :param snippet_name: name of the output file", "to Iron-Skillet'.center(80)) print(' ') print('=' * 80) input_var = {}", "loadable_configs dir myconfig_path = myconfig_newdir(input_var['output_dir'], input_var['archive_time']) # render full and", "') print('=' * 80) input_var = {} # archive_time used", "the output directory input_var['output_dir'] = input('Enter the name of the", "# render full and set conf files print('\\nworking with {0}", "archivetime: datetimestamp used for the output my_config folder naming '''", "Returns the MD5 Hashed secret for use as a password", "THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND", "WARRANTIES OF # MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL", "') # this prompts for the superuser username to be", "configs with jinja2 inputs are read from the template directory", "the output directory: ') # this prompts for the superuser", "and added to the config file\\n'.format( input_var['ADMINISTRATOR_USERNAME'])) passwordmatch = False", "LOSS OF USE, DATA OR PROFITS, WHETHER IN AN #", "values try: with open(config_var_file, 'r') as var_metadata: variables = oyaml.safe_load(var_metadata.read())", ":param render_type: type if full or set commands; aligns with", "for custom jinja filters def md5_hash(txt): ''' Returns the MD5", "is False: os.mkdir('{0}/{1}'.format(myconfigdir, config_type)) print('created new subdirectories for {0}'.format(config_type)) return", "'r') as var_metadata: variables = oyaml.safe_load(var_metadata.read()) except IOError as ioe:", "to the config directory we want (panos / panorama) myconfigpath", "to my_config :param config_type: panos or panorama to read/write to", "print('\\na phash will be created for superuser {0} and added", "default admin/admin while passwordmatch is False: password1 = getpass.getpass(\"Enter the", "THIS SOFTWARE. # Author: <NAME> <<EMAIL>> ''' Palo Alto Networks", "shutil import sys import time import getpass import oyaml from", "type eg. 
if full or snippets; aligns with folder name", "myconfig_newdir(input_var['output_dir'], input_var['archive_time']) # render full and set conf files print('\\nworking", "except IOError as ioe: print(f'Could not open metadata file {config_var_file}')", "the context value from config_variables.yaml :param filename: name of the", "through all config types that have their respective template folders", "ioe: print(f'Could not open metadata file {config_var_file}') print(ioe) sys.exit() #", "created for superuser {0} and added to the config file\\n'.format(", "import getpass import oyaml from jinja2 import Environment, FileSystemLoader from", "datetime when script run; to be used as suffix of", "Environment(loader=FileSystemLoader('{0}/{1}'.format(template_path, render_type))) # load our custom jinja filters here, see", "the sys path for module lookup sys.path.append(template_path) # output subdir", "full or set commands; aligns with folder name :param context:", "want (panos / panorama) template_path = os.path.abspath(os.path.join('..', 'templates', config_type)) #", "context = create_context(config_variables) # update context dict with variables from", "file used for the render into the my_template folder var_file", "sys path for module lookup sys.path.append(template_path) # output subdir located", "variables and render the output configs with jinja2 inputs are", "used as folder name :param render_type: type eg. 
if full", "sha256_crypt.hash(txt) def sha512_hash(txt): ''' Returns the SHA512 Hashed secret for", "configfile: configfile.write(element) # copy the variables file used for the", "the full path to the config directory we want (panos", "superuser administrator account username: ') print('\\na phash will be created", "open('{0}/{1}/{2}'.format(myconfigdir, config_type, filename), 'w') as configfile: configfile.write(element) # copy the", "= os.path.abspath(os.path.join('..', 'templates', config_type)) # append to the sys path", "element): ''' after rendering the template save to the myconfig", "shutil.copy(vfilesrc, vfiledst) return # define functions for custom jinja filters", "defs below for reference env.filters['md5_hash'] = md5_hash env.filters['des_hash'] = des_hash", "prompts for the superuser username to be added into the", "key-based dictionary jinja_context = dict() for snippet_var in variables['variables']: jinja_context[snippet_var['name']]", "the config file\\n'.format( input_var['ADMINISTRATOR_USERNAME'])) passwordmatch = False # prompt for", "= <PASSWORD> else: print('\\nPasswords do not match. Please try again.\\n')", "else: print('\\nPasswords do not match. 
Please try again.\\n') # loop", "''' create a new main loadable_configs folder if required then", "DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING", "configs :param myconfigdir_name: prefix folder name from the my_variables.py file", "sha256_crypt from passlib.hash import sha512_crypt defined_filters = ['md5_hash', 'des_hash', 'sha512_hash']", "read the metafile to get variables and values try: with", "= env.get_template(filename) rendered_template = template.render(context) return rendered_template def template_save(snippet_name, myconfigdir,", "env.filters['des_hash'] = des_hash env.filters['sha512_hash'] = sha512_hash template = env.get_template(filename) rendered_template", "required then new subdirectories for configs :param myconfigdir_name: prefix folder", "creation: {0}\\n'.format(input_var['archive_time'])) # this prompts for the prefix name of", "from passlib.hash import sha512_crypt defined_filters = ['md5_hash', 'des_hash', 'sha512_hash'] def", "myconfig_newdir(myconfigdir_name, foldertime): ''' create a new main loadable_configs folder if", "one # then create snippets and full sub-directories myconfigdir =", "load our custom jinja filters here, see the function defs", "= sha512_hash template = env.get_template(filename) rendered_template = template.render(context) return rendered_template", "''' Returns the SHA512 Hashed secret for use as a", "run the script This software is provided without support, warranty,", "OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS", "unique folder name print('=' * 80) print(' ') print('Welcome to", "') print('Welcome to Iron-Skillet'.center(80)) print(' ') print('=' * 80) input_var", "print('\\nworking with {0} config template'.format(render_type)) if render_type == 'full': filename", "rendered_template = template.render(context) return rendered_template def template_save(snippet_name, myconfigdir, config_type, element):", "jinja template using the context value from config_variables.yaml :param 
filename:", "This software is provided without support, warranty, or guarantee. Use", "render_type: type eg. if full or snippets; aligns with folder", "full path name ''' # get the full path to", "'iron_skillet_{0}_full.conf'.format(config_type) element = template_render(filename, template_path, render_type, context) template_save(filename, myconfig_path, config_type,", "template_save(filename, myconfig_path, config_type, element) print('\\nconfigs have been created and can", "admin/admin while passwordmatch is False: password1 = getpass.getpass(\"Enter the superuser", "''' config_variables = 'config_variables.yaml' # create dict of values for", "THE AUTHOR BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT,", "PanOS configuration :param txt: text to be hashed :return: password", "path name ''' # get the full path to the", "template for {0}'.format(snippet_name)) filename = snippet_name with open('{0}/{1}/{2}'.format(myconfigdir, config_type, filename),", "loop through all config types that have their respective template", "the name of the output directory: ') # this prompts", "SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS.", "folder creation: {0}\\n'.format(input_var['archive_time'])) # this prompts for the prefix name", "OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION", "directory we want (panos / panorama) myconfigpath = os.path.abspath(os.path.join('..', 'loadable_configs'))", "file and set conf file ''' print('..creating template for {0}'.format(filename))", "password: \") password2 = getpass.getpass(\"Enter password again to verify: \")", "the output file :param myconfigdir: path to the my_config directory", "md5_crypt from passlib.hash import sha256_crypt from passlib.hash import sha512_crypt defined_filters", "conf file ''' print('..creating template for {0}'.format(filename)) env = Environment(loader=FileSystemLoader('{0}/{1}'.format(template_path,", "phash field in the configurations ''' return sha512_crypt.hash(txt) def 
replace_variables(config_type,", "configurations ''' return sha256_crypt.hash(txt) def sha512_hash(txt): ''' Returns the SHA512", "the config_variables.yaml values and then run the script This software", "file :param render_type: type if full or set commands; aligns", "element rendered based on input variables; used as folder name", "the MD5 Hashed secret for use as a password hash", "template'.format(render_type)) if render_type == 'full': filename = 'iron_skillet_{0}_full.xml'.format(config_type) if render_type", "panos or panorama to read/write to the respective directories :param", "of the output file :param myconfigdir: path to the my_config", "copyright notice and this permission notice appear in all copies.", "input for snippet_var in input_var: context[snippet_var] = input_var[snippet_var] # get", "for snippet_var in input_var: context[snippet_var] = input_var[snippet_var] # get the", "values and then run the script This software is provided", "Edit the config_variables.yaml values and then run the script This", "values for the jinja template render context = create_context(config_variables) #", "= 'iron_skillet_{0}_full.xml'.format(config_type) if render_type == 'set_commands': filename = 'iron_skillet_{0}_full.conf'.format(config_type) element", "try again.\\n') # loop through all config types that have", "not create a new one # then create snippets and", "passlib.hash import sha512_crypt defined_filters = ['md5_hash', 'des_hash', 'sha512_hash'] def myconfig_newdir(myconfigdir_name,", "prefix folder name from the my_variables.py file :param foldertime: datetime", "custom jinja filters here, see the function defs below for", "des_crypt.hash(txt) def sha256_hash(txt): ''' Returns the SHA256 Hashed secret for", "file :param myconfigdir: path to the my_config directory :param config_type:", "SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES #", "for the prefix name of the output directory input_var['output_dir'] =", "this prompts for the 
superuser username to be added into", "a phash and store in the my_config files; no default", "be used as suffix of folder name :return: the myconfigdir", ":return: no value returned (future could be success code) '''", "DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT,", "main loadable_configs folder if required then new subdirectories for configs", "below for reference env.filters['md5_hash'] = md5_hash env.filters['des_hash'] = des_hash env.filters['sha512_hash']", "element) print('\\nconfigs have been created and can be found in", "variables to render :return: return the rendered xml file and", "the phash field in the configurations ''' return sha512_crypt.hash(txt) def", "from jinja2 import Environment, FileSystemLoader from passlib.hash import des_crypt from", "for configs :param myconfigdir_name: prefix folder name from the my_variables.py", "as part of the my_config directory name input_var['archive_time'] = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H%M%S')", "for Panos and Panorama Edit the config_variables.yaml values and then", "print(' ') print('Welcome to Iron-Skillet'.center(80)) print(' ') print('=' * 80)", ":param template_path: path for the template file :param render_type: type", "PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES #", "''' import datetime import os import shutil import sys import", "/ panorama) myconfigpath = os.path.abspath(os.path.join('..', 'loadable_configs')) if os.path.isdir(myconfigpath) is False:", "have their respective template folders for config_type in ['panos', 'panorama']:", "template_render(filename, template_path, render_type, context): ''' render the jinja template using", "the respective directories :param archivetime: datetimestamp used for the output", "user defined values Output is a set of loadable full", "datetimestamp used for the output my_config folder naming ''' config_variables", "False # prompt for the superuser password to create a", "verify: \") if password1 == 
password2: input_var['ADMINISTRATOR_PASSWORD'] = <PASSWORD> passwordmatch", "full configurations and set commands for Panos and Panorama Edit", "== 'set_commands': filename = 'iron_skillet_{0}_full.conf'.format(config_type) element = template_render(filename, template_path, render_type,", "'des_hash', 'sha512_hash'] def myconfig_newdir(myconfigdir_name, foldertime): ''' create a new main", "do not match. Please try again.\\n') # loop through all", "commands for Panos and Panorama Edit the config_variables.yaml values and", "FileSystemLoader from passlib.hash import des_crypt from passlib.hash import md5_crypt from", "print('..creating template for {0}'.format(filename)) env = Environment(loader=FileSystemLoader('{0}/{1}'.format(template_path, render_type))) # load", "have been created and can be found in {0}'.format(myconfig_path)) print('along", "= os.path.abspath(os.path.join('..', 'loadable_configs')) if os.path.isdir(myconfigpath) is False: os.mkdir(myconfigpath, mode=0o755) print('created", "if required then new subdirectories for configs :param myconfigdir_name: prefix", "template folders for config_type in ['panos', 'panorama']: for render_type in", "and/or distribute this software for any # purpose with or", "IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS", "the phash field in the configurations ''' return des_crypt.hash(txt) def", "each run saves with a unique prefix name + datetime", "render_type, input_var): ''' get the input variables and render the", "import sha256_crypt from passlib.hash import sha512_crypt defined_filters = ['md5_hash', 'des_hash',", "configurations ''' return sha512_crypt.hash(txt) def replace_variables(config_type, render_type, input_var): ''' get", "passwordmatch is False: password1 = getpass.getpass(\"Enter the superuser administrator account", "env.filters['sha512_hash'] = sha512_hash template = env.get_template(filename) rendered_template = template.render(context) return", "the input variables and render the output 
configs with jinja2", "Networks # # Permission to use, copy, modify, and/or distribute", "from the my_variables.py file :param foldertime: datetime when script run;", "USE OR PERFORMANCE OF THIS SOFTWARE. # Author: <NAME> <<EMAIL>>", "input_var['archive_time']) # render full and set conf files print('\\nworking with", "{0}'.format(myconfig_path)) print('along with the metadata values used to render the", "return md5_crypt.hash(txt) def des_hash(txt): ''' Returns the DES Hashed secret", "FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN", "without support, warranty, or guarantee. Use at your own risk.", "above # copyright notice and this permission notice appear in", "myconfigdir, config_type, element): ''' after rendering the template save to", "Output is a set of loadable full configurations and set", "new archive folder {0}-{1}'.format(myconfigdir_name, foldertime)) if os.path.isdir('{0}/{1}'.format(myconfigdir, config_type)) is False:", "var_file)) is False: vfilesrc = var_file vfiledst = '{0}/{1}'.format(myconfigdir, var_file)", "the PanOS configuration :param txt: text to be hashed :return:", "def myconfig_newdir(myconfigdir_name, foldertime): ''' create a new main loadable_configs folder", ":param txt: text to be hashed :return: password hash of", "myconfigdir: path to the my_config directory :param config_type: based on", "in the phash field in the configurations ''' return des_crypt.hash(txt)", "for {0}'.format(snippet_name)) filename = snippet_name with open('{0}/{1}/{2}'.format(myconfigdir, config_type, filename), 'w')", "full and set conf files print('\\nworking with {0} config template'.format(render_type))", "values Output is a set of loadable full configurations and", "dir myconfig_path = myconfig_newdir(input_var['output_dir'], input_var['archive_time']) # render full and set", "default admin/admin used input_var['ADMINISTRATOR_USERNAME'] = input('Enter the superuser administrator account", "all copies. 
# # THE SOFTWARE IS PROVIDED \"AS IS\"", "AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO", "list; eg. panos or panorama :param element: xml element rendered", "as suffix of folder name :return: the myconfigdir full path", "input_var['ADMINISTRATOR_USERNAME'] = input('Enter the superuser administrator account username: ') print('\\na", "var_metadata: variables = oyaml.safe_load(var_metadata.read()) except IOError as ioe: print(f'Could not", "my_variables.py file :param foldertime: datetime when script run; to be", "the DES Hashed secret for use as a password hash", "passlib.hash import sha256_crypt from passlib.hash import sha512_crypt defined_filters = ['md5_hash',", "config_type: panos or panorama to read/write to the respective directories", "superuser username to be added into the configuration; no default", "from passlib.hash import md5_crypt from passlib.hash import sha256_crypt from passlib.hash", "my_template folder var_file = 'loadable_config_vars/config_variables.yaml' if os.path.isfile('{0}/{1}'.format(myconfigdir, var_file)) is False:", "REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF #", "snippet_var in input_var: context[snippet_var] = input_var[snippet_var] # get the full", "and then run the script This software is provided without", "MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE", "== password2: input_var['ADMINISTRATOR_PASSWORD'] = <PASSWORD> passwordmatch = <PASSWORD> else: print('\\nPasswords", "# purpose with or without fee is hereby granted, provided", "set commands; aligns with folder name :param context: dict of", "OUT OF # OR IN CONNECTION WITH THE USE OR", "output directory: ') # this prompts for the superuser username", "in the my_config files; no default admin/admin while passwordmatch is", "no value returned (future could be success code) ''' print('..saving", "configs\\n') return if __name__ == '__main__': # Use the timestamp", "added into the configuration; no default admin/admin used input_var['ADMINISTRATOR_USERNAME'] =", "no default admin/admin while passwordmatch is False: password1 = getpass.getpass(\"Enter", "script This software is provided without support, warranty, or guarantee.", "sha512_hash template = env.get_template(filename) rendered_template = template.render(context) return rendered_template def", "update context dict with variables from user input for snippet_var", "print('\\ndatetime used for folder creation: {0}\\n'.format(input_var['archive_time'])) # this prompts for", "not open metadata file {config_var_file}') print(ioe) sys.exit() # grab the", "= template.render(context) return rendered_template def template_save(snippet_name, myconfigdir, config_type, element): '''", "guarantee. Use at your own risk. 
''' import datetime import", "datetime import os import shutil import sys import time import", "# archive_time used as part of the my_config directory name", "the template directory and output to my_config :param config_type: panos", "OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF", "the configurations ''' return des_crypt.hash(txt) def sha256_hash(txt): ''' Returns the", "folder name print('=' * 80) print(' ') print('Welcome to Iron-Skillet'.center(80))", "used input_var['ADMINISTRATOR_USERNAME'] = input('Enter the superuser administrator account username: ')", "EVENT SHALL THE AUTHOR BE LIABLE FOR # ANY SPECIAL,", "snippet_var in variables['variables']: jinja_context[snippet_var['name']] = snippet_var['value'] return jinja_context def template_render(filename,", "WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF", "TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH", "folder name from the my_variables.py file :param foldertime: datetime when", "our custom jinja filters here, see the function defs below", "jinja2 inputs are read from the template directory and output", "''' Returns the MD5 Hashed secret for use as a", "for module lookup sys.path.append(template_path) # output subdir located in loadable_configs", "their respective template folders for config_type in ['panos', 'panorama']: for", "subdirectories for {0}'.format(config_type)) return myconfigdir def create_context(config_var_file): # read the", "False: os.mkdir('{0}/{1}'.format(myconfigdir, config_type)) print('created new subdirectories for {0}'.format(config_type)) return myconfigdir", "into the configuration; no default admin/admin used input_var['ADMINISTRATOR_USERNAME'] = input('Enter", "= create_context(config_variables) # update context dict with variables from user", "and store in the my_config files; no default admin/admin while", "print('\\ncreated new archive folder {0}-{1}'.format(myconfigdir_name, foldertime)) if 
os.path.isdir('{0}/{1}'.format(myconfigdir, config_type)) is", "# check that configs folder exists and if not create", "False: os.mkdir(myconfigpath, mode=0o755) print('created new loadable config directory') # check", "name of the output directory: ') # this prompts for", "folder if required then new subdirectories for configs :param myconfigdir_name:", "config template'.format(render_type)) if render_type == 'full': filename = 'iron_skillet_{0}_full.xml'.format(config_type) if", "IOError as ioe: print(f'Could not open metadata file {config_var_file}') print(ioe)", "if __name__ == '__main__': # Use the timestamp to create", "get variables and values try: with open(config_var_file, 'r') as var_metadata:", "purpose with or without fee is hereby granted, provided that", "with a unique prefix name + datetime :param snippet_name: name", "Returns the DES Hashed secret for use as a password", "name ''' # get the full path to the config", "function defs below for reference env.filters['md5_hash'] = md5_hash env.filters['des_hash'] =", "== 'full': filename = 'iron_skillet_{0}_full.xml'.format(config_type) if render_type == 'set_commands': filename", "for superuser {0} and added to the config file\\n'.format( input_var['ADMINISTRATOR_USERNAME']))", "template directory and output to my_config :param config_type: panos or", "as folder name :param render_type: type eg. if full or", "used for the output my_config folder naming ''' config_variables =", "print('\\nPasswords do not match. Please try again.\\n') # loop through", "text to be hashed :return: password hash of the string", "= var_file vfiledst = '{0}/{1}'.format(myconfigdir, var_file) shutil.copy(vfilesrc, vfiledst) return #", "to be added into the configuration; no default admin/admin used", "salt and configuration information. 
Suitable to place in the phash", "= False # prompt for the superuser password to create", "filters def md5_hash(txt): ''' Returns the MD5 Hashed secret for", "path for module lookup sys.path.append(template_path) # output subdir located in", "if os.path.isdir(myconfigpath) is False: os.mkdir(myconfigpath, mode=0o755) print('created new loadable config", "create a phash and store in the my_config files; no", "['md5_hash', 'des_hash', 'sha512_hash'] def myconfig_newdir(myconfigdir_name, foldertime): ''' create a new", "OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN", ":param myconfigdir_name: prefix folder name from the my_variables.py file :param", "filename = 'iron_skillet_{0}_full.conf'.format(config_type) element = template_render(filename, template_path, render_type, context) template_save(filename,", "NO EVENT SHALL THE AUTHOR BE LIABLE FOR # ANY", "# copyright notice and this permission notice appear in all", "myconfigdir = '{0}/{1}-{2}'.format(myconfigpath, myconfigdir_name, foldertime) if os.path.isdir(myconfigdir) is False: os.mkdir(myconfigdir,", "jinja filters here, see the function defs below for reference", "directory :param config_type: based on initial run list; eg. 
panos", "WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER", ":param archivetime: datetimestamp used for the output my_config folder naming", ":param myconfigdir: path to the my_config directory :param config_type: based", "configurations ''' return md5_crypt.hash(txt) def des_hash(txt): ''' Returns the DES", "to the config file\\n'.format( input_var['ADMINISTRATOR_USERNAME'])) passwordmatch = False # prompt", "unique prefix name + datetime :param snippet_name: name of the", "or set commands; aligns with folder name :param context: dict", "the template file :param template_path: path for the template file", "<<EMAIL>> ''' Palo Alto Networks create_loadable_configs.py Provides rendering of configuration", "prompts for the prefix name of the output directory input_var['output_dir']", "Alto Networks create_loadable_configs.py Provides rendering of configuration templates with user", "with folder name :return: no value returned (future could be", "prompt for the superuser password to create a phash and", "directory: ') # this prompts for the superuser username to", "to create a unique folder name print('=' * 80) print('", "for reference env.filters['md5_hash'] = md5_hash env.filters['des_hash'] = des_hash env.filters['sha512_hash'] =", "datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H%M%S') print('\\ndatetime used for folder creation: {0}\\n'.format(input_var['archive_time'])) # this prompts", "Use at your own risk. 
''' import datetime import os", "could be success code) ''' print('..saving template for {0}'.format(snippet_name)) filename", "user input for snippet_var in input_var: context[snippet_var] = input_var[snippet_var] #", "folder {0}-{1}'.format(myconfigdir_name, foldertime)) if os.path.isdir('{0}/{1}'.format(myconfigdir, config_type)) is False: os.mkdir('{0}/{1}'.format(myconfigdir, config_type))", "of variables to render :return: return the rendered xml file", "run saves with a unique prefix name + datetime :param", "the superuser username to be added into the configuration; no", "OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE", "folder var_file = 'loadable_config_vars/config_variables.yaml' if os.path.isfile('{0}/{1}'.format(myconfigdir, var_file)) is False: vfilesrc", "= {} # archive_time used as part of the my_config", "template for {0}'.format(filename)) env = Environment(loader=FileSystemLoader('{0}/{1}'.format(template_path, render_type))) # load our", "if render_type == 'set_commands': filename = 'iron_skillet_{0}_full.conf'.format(config_type) element = template_render(filename,", "= getpass.getpass(\"Enter the superuser administrator account password: \") password2 =", "read from the template directory and output to my_config :param", "import time import getpass import oyaml from jinja2 import Environment,", "to get variables and values try: with open(config_var_file, 'r') as", "configuration :param txt: text to be hashed :return: password hash", "import oyaml from jinja2 import Environment, FileSystemLoader from passlib.hash import", "''' print('..saving template for {0}'.format(snippet_name)) filename = snippet_name with open('{0}/{1}/{2}'.format(myconfigdir,", "print('\\nconfigs have been created and can be found in {0}'.format(myconfig_path))", "if os.path.isdir(myconfigdir) is False: os.mkdir(myconfigdir, mode=0o755) print('\\ncreated new archive folder", "in variables['variables']: jinja_context[snippet_var['name']] = snippet_var['value'] return 
jinja_context def template_render(filename, template_path,", "folders for config_type in ['panos', 'panorama']: for render_type in ['full',", "datetime :param snippet_name: name of the output file :param myconfigdir:", "the script This software is provided without support, warranty, or", "# loop through all config types that have their respective", "can be found in {0}'.format(myconfig_path)) print('along with the metadata values", "dictionary jinja_context = dict() for snippet_var in variables['variables']: jinja_context[snippet_var['name']] =", "dict of values for the jinja template render context =", "the output my_config folder naming ''' config_variables = 'config_variables.yaml' #", "superuser administrator account password: \") password2 = getpass.getpass(\"Enter password again", "(future could be success code) ''' print('..saving template for {0}'.format(snippet_name))", "password1 == password2: input_var['ADMINISTRATOR_PASSWORD'] = <PASSWORD> passwordmatch = <PASSWORD> else:", "at your own risk. ''' import datetime import os import", "with salt and configuration information. Suitable to place in the", "= 'config_variables.yaml' # create dict of values for the jinja", "used for folder creation: {0}\\n'.format(input_var['archive_time'])) # this prompts for the", "create_context(config_var_file): # read the metafile to get variables and values", "on initial run list; eg. panos or panorama :param element:", "configurations and set commands for Panos and Panorama Edit the", "with open(config_var_file, 'r') as var_metadata: variables = oyaml.safe_load(var_metadata.read()) except IOError", "created and can be found in {0}'.format(myconfig_path)) print('along with the", "phash will be created for superuser {0} and added to", "config_type: based on initial run list; eg. panos or panorama", "render_type: type if full or set commands; aligns with folder", "risk. 
''' import datetime import os import shutil import sys", "create_context(config_variables) # update context dict with variables from user input", "2018, Palo Alto Networks # # Permission to use, copy,", "Suitable to place in the phash field in the configurations", "returned (future could be success code) ''' print('..saving template for", "in input_var: context[snippet_var] = input_var[snippet_var] # get the full path", "ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL", "used to render the configs\\n') return if __name__ == '__main__':", "from config_variables.yaml :param filename: name of the template file :param", ":param config_type: based on initial run list; eg. panos or", "os.mkdir('{0}/{1}'.format(myconfigdir, config_type)) print('created new subdirectories for {0}'.format(config_type)) return myconfigdir def", "set conf file ''' print('..creating template for {0}'.format(filename)) env =", "open(config_var_file, 'r') as var_metadata: variables = oyaml.safe_load(var_metadata.read()) except IOError as", "vfiledst = '{0}/{1}'.format(myconfigdir, var_file) shutil.copy(vfilesrc, vfiledst) return # define functions", "password2 = getpass.getpass(\"Enter password again to verify: \") if password1", "field in the configurations ''' return md5_crypt.hash(txt) def des_hash(txt): '''", "xml file and set conf file ''' print('..creating template for", "vfilesrc = var_file vfiledst = '{0}/{1}'.format(myconfigdir, var_file) shutil.copy(vfilesrc, vfiledst) return", "be hashed :return: password hash of the string with salt", "initial run list; eg. 
panos or panorama :param element: xml", "added to the config file\\n'.format( input_var['ADMINISTRATOR_USERNAME'])) passwordmatch = False #", "txt: text to be hashed :return: password hash of the", "the config directory we want (panos / panorama) myconfigpath =", "env.get_template(filename) rendered_template = template.render(context) return rendered_template def template_save(snippet_name, myconfigdir, config_type,", "support, warranty, or guarantee. Use at your own risk. '''", "from passlib.hash import sha256_crypt from passlib.hash import sha512_crypt defined_filters =", "secret for use as a password hash in the PanOS", "= snippet_name with open('{0}/{1}/{2}'.format(myconfigdir, config_type, filename), 'w') as configfile: configfile.write(element)", "# # THE SOFTWARE IS PROVIDED \"AS IS\" AND THE", "Provides rendering of configuration templates with user defined values Output", "name :return: no value returned (future could be success code)", "with the metadata values used to render the configs\\n') return", "for any # purpose with or without fee is hereby", "your own risk. 
''' import datetime import os import shutil", "# copy the variables file used for the render into", "os.path.isdir('{0}/{1}'.format(myconfigdir, config_type)) is False: os.mkdir('{0}/{1}'.format(myconfigdir, config_type)) print('created new subdirectories for", "panorama :param element: xml element rendered based on input variables;", "ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT", "respective directories :param archivetime: datetimestamp used for the output my_config", "template render context = create_context(config_variables) # update context dict with", "{0}'.format(filename)) env = Environment(loader=FileSystemLoader('{0}/{1}'.format(template_path, render_type))) # load our custom jinja", "THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS", "no default admin/admin used input_var['ADMINISTRATOR_USERNAME'] = input('Enter the superuser administrator", "template using the context value from config_variables.yaml :param filename: name", "the my_variables.py file :param foldertime: datetime when script run; to", "copies. # # THE SOFTWARE IS PROVIDED \"AS IS\" AND", "print('..saving template for {0}'.format(snippet_name)) filename = snippet_name with open('{0}/{1}/{2}'.format(myconfigdir, config_type,", "= dict() for snippet_var in variables['variables']: jinja_context[snippet_var['name']] = snippet_var['value'] return", "is provided without support, warranty, or guarantee. 
Use at your", "panos or panorama :param element: xml element rendered based on", "metadata values and convert to key-based dictionary jinja_context = dict()", "\"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH", "administrator account username: ') print('\\na phash will be created for", "print(' ') print('=' * 80) input_var = {} # archive_time", "# get the full path to the output directory we", "after rendering the template save to the myconfig directory each", "sha256_hash(txt): ''' Returns the SHA256 Hashed secret for use as", "this prompts for the prefix name of the output directory", "from user input for snippet_var in input_var: context[snippet_var] = input_var[snippet_var]", "Iron-Skillet'.center(80)) print(' ') print('=' * 80) input_var = {} #", "file ''' print('..creating template for {0}'.format(filename)) env = Environment(loader=FileSystemLoader('{0}/{1}'.format(template_path, render_type)))", "<NAME> <<EMAIL>> ''' Palo Alto Networks create_loadable_configs.py Provides rendering of", "template file :param render_type: type if full or set commands;", "# # Permission to use, copy, modify, and/or distribute this", "render the output configs with jinja2 inputs are read from", "config_type, element): ''' after rendering the template save to the", "in the configurations ''' return sha256_crypt.hash(txt) def sha512_hash(txt): ''' Returns", "path to the my_config directory :param config_type: based on initial", "my_config :param config_type: panos or panorama to read/write to the", "defined values Output is a set of loadable full configurations", "'iron_skillet_{0}_full.xml'.format(config_type) if render_type == 'set_commands': filename = 'iron_skillet_{0}_full.conf'.format(config_type) element =", "create dict of values for the jinja template render context", "convert to key-based dictionary jinja_context = dict() for snippet_var in", "jinja2 import Environment, FileSystemLoader from passlib.hash import des_crypt from passlib.hash", "# Copyright (c) 
2018, Palo Alto Networks # # Permission", "into the my_template folder var_file = 'loadable_config_vars/config_variables.yaml' if os.path.isfile('{0}/{1}'.format(myconfigdir, var_file))", "directories :param archivetime: datetimestamp used for the output my_config folder", "custom jinja filters def md5_hash(txt): ''' Returns the MD5 Hashed", "variables = oyaml.safe_load(var_metadata.read()) except IOError as ioe: print(f'Could not open", "a unique folder name print('=' * 80) print(' ') print('Welcome", "admin/admin used input_var['ADMINISTRATOR_USERNAME'] = input('Enter the superuser administrator account username:", "dict() for snippet_var in variables['variables']: jinja_context[snippet_var['name']] = snippet_var['value'] return jinja_context", "ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE", "Returns the SHA512 Hashed secret for use as a password", "Panos and Panorama Edit the config_variables.yaml values and then run", "* 80) input_var = {} # archive_time used as part", "located in loadable_configs dir myconfig_path = myconfig_newdir(input_var['output_dir'], input_var['archive_time']) # render", "return sha256_crypt.hash(txt) def sha512_hash(txt): ''' Returns the SHA512 Hashed secret", ":return: return the rendered xml file and set conf file", "SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL", "Permission to use, copy, modify, and/or distribute this software for", "return the rendered xml file and set conf file '''", "{0}-{1}'.format(myconfigdir_name, foldertime)) if os.path.isdir('{0}/{1}'.format(myconfigdir, config_type)) is False: os.mkdir('{0}/{1}'.format(myconfigdir, config_type)) print('created", "') print('\\na phash will be created for superuser {0} and", "{0} and added to the config file\\n'.format( input_var['ADMINISTRATOR_USERNAME'])) passwordmatch =", "file\\n'.format( input_var['ADMINISTRATOR_USERNAME'])) passwordmatch = False # prompt for the superuser", "hash of the string with salt and configuration information. 
Suitable", "the timestamp to create a unique folder name print('=' *", "Environment, FileSystemLoader from passlib.hash import des_crypt from passlib.hash import md5_crypt", "define functions for custom jinja filters def md5_hash(txt): ''' Returns", "PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR", "snippet_var['value'] return jinja_context def template_render(filename, template_path, render_type, context): ''' render", "with jinja2 inputs are read from the template directory and", "to read/write to the respective directories :param archivetime: datetimestamp used", "output my_config folder naming ''' config_variables = 'config_variables.yaml' # create", "# output subdir located in loadable_configs dir myconfig_path = myconfig_newdir(input_var['output_dir'],", "to the myconfig directory each run saves with a unique", "name input_var['archive_time'] = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H%M%S') print('\\ndatetime used for folder creation: {0}\\n'.format(input_var['archive_time']))", "SHA256 Hashed secret for use as a password hash in", "dict with variables from user input for snippet_var in input_var:", "input_var: context[snippet_var] = input_var[snippet_var] # get the full path to", "in {0}'.format(myconfig_path)) print('along with the metadata values used to render", "''' Returns the DES Hashed secret for use as a", ":param config_type: panos or panorama to read/write to the respective", "and this permission notice appear in all copies. 
# #", "saves with a unique prefix name + datetime :param snippet_name:", "for the jinja template render context = create_context(config_variables) # update", "as configfile: configfile.write(element) # copy the variables file used for", "copy, modify, and/or distribute this software for any # purpose", "a password hash in the PanOS configuration :param txt: text", "the template save to the myconfig directory each run saves", "file {config_var_file}') print(ioe) sys.exit() # grab the metadata values and", "''' after rendering the template save to the myconfig directory", "eg. panos or panorama :param element: xml element rendered based", "to use, copy, modify, and/or distribute this software for any", "a set of loadable full configurations and set commands for", "def template_render(filename, template_path, render_type, context): ''' render the jinja template", "hash in the PanOS configuration :param txt: text to be", "run list; eg. panos or panorama :param element: xml element", "snippets and full sub-directories myconfigdir = '{0}/{1}-{2}'.format(myconfigpath, myconfigdir_name, foldertime) if", "timestamp to create a unique folder name print('=' * 80)", "return if __name__ == '__main__': # Use the timestamp to", "the above # copyright notice and this permission notice appear", "phash field in the configurations ''' return des_crypt.hash(txt) def sha256_hash(txt):", "= myconfig_newdir(input_var['output_dir'], input_var['archive_time']) # render full and set conf files", "used as suffix of folder name :return: the myconfigdir full", "= input_var[snippet_var] # get the full path to the output", "Palo Alto Networks # # Permission to use, copy, modify,", "myconfigdir def create_context(config_var_file): # read the metafile to get variables", "values used to render the configs\\n') return if __name__ ==", "__name__ == '__main__': # Use the timestamp to create a", "Use the timestamp to create a unique folder name print('='", "(c) 2018, Palo Alto Networks # # Permission 
to use,", "IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD", "configurations ''' return des_crypt.hash(txt) def sha256_hash(txt): ''' Returns the SHA256", "password hash in the PanOS configuration :param txt: text to", "open metadata file {config_var_file}') print(ioe) sys.exit() # grab the metadata", "return jinja_context def template_render(filename, template_path, render_type, context): ''' render the", "print('=' * 80) print(' ') print('Welcome to Iron-Skillet'.center(80)) print(' ')", "jinja_context = dict() for snippet_var in variables['variables']: jinja_context[snippet_var['name']] = snippet_var['value']", "# this prompts for the superuser username to be added", "if os.path.isdir('{0}/{1}'.format(myconfigdir, config_type)) is False: os.mkdir('{0}/{1}'.format(myconfigdir, config_type)) print('created new subdirectories", "USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF", "and convert to key-based dictionary jinja_context = dict() for snippet_var", "directory name input_var['archive_time'] = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H%M%S') print('\\ndatetime used for folder creation:", "# prompt for the superuser password to create a phash", "directory') # check that configs folder exists and if not", "oyaml from jinja2 import Environment, FileSystemLoader from passlib.hash import des_crypt", "configuration templates with user defined values Output is a set", "metafile to get variables and values try: with open(config_var_file, 'r')", "field in the configurations ''' return des_crypt.hash(txt) def sha256_hash(txt): '''", "template_path: path for the template file :param render_type: type if", "IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. 
IN NO EVENT", "as ioe: print(f'Could not open metadata file {config_var_file}') print(ioe) sys.exit()", "for {0}'.format(filename)) env = Environment(loader=FileSystemLoader('{0}/{1}'.format(template_path, render_type))) # load our custom", "= md5_hash env.filters['des_hash'] = des_hash env.filters['sha512_hash'] = sha512_hash template =", "loadable full configurations and set commands for Panos and Panorama", "config directory') # check that configs folder exists and if", "vfiledst) return # define functions for custom jinja filters def", "''' get the input variables and render the output configs", "with variables from user input for snippet_var in input_var: context[snippet_var]", "FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR", "= Environment(loader=FileSystemLoader('{0}/{1}'.format(template_path, render_type))) # load our custom jinja filters here,", "{0} config template'.format(render_type)) if render_type == 'full': filename = 'iron_skillet_{0}_full.xml'.format(config_type)", "'set_commands': filename = 'iron_skillet_{0}_full.conf'.format(config_type) element = template_render(filename, template_path, render_type, context)", "= datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H%M%S') print('\\ndatetime used for folder creation: {0}\\n'.format(input_var['archive_time'])) # this", "variables['variables']: jinja_context[snippet_var['name']] = snippet_var['value'] return jinja_context def template_render(filename, template_path, render_type,", "software for any # purpose with or without fee is", "if full or snippets; aligns with folder name :return: no", "is False: password1 = getpass.getpass(\"Enter the superuser administrator account password:", "''' return md5_crypt.hash(txt) def des_hash(txt): ''' Returns the DES Hashed", "the function defs below for reference env.filters['md5_hash'] = md5_hash env.filters['des_hash']", "# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING", "''' return des_crypt.hash(txt) def 
sha256_hash(txt): ''' Returns the SHA256 Hashed", "CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS", "hereby granted, provided that the above # copyright notice and", "configs folder exists and if not create a new one", "the rendered xml file and set conf file ''' print('..creating", "templates with user defined values Output is a set of", "fee is hereby granted, provided that the above # copyright", "in the phash field in the configurations ''' return md5_crypt.hash(txt)", "name print('=' * 80) print(' ') print('Welcome to Iron-Skillet'.center(80)) print('", "password to create a phash and store in the my_config", "the my_config files; no default admin/admin while passwordmatch is False:", "or guarantee. Use at your own risk. ''' import datetime", "render the configs\\n') return if __name__ == '__main__': # Use", "# then create snippets and full sub-directories myconfigdir = '{0}/{1}-{2}'.format(myconfigpath,", "INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. 
IN", "to render :return: return the rendered xml file and set", "OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE,", "config_type, element) print('\\nconfigs have been created and can be found", "\") if password1 == password2: input_var['ADMINISTRATOR_PASSWORD'] = <PASSWORD> passwordmatch =", "metadata file {config_var_file}') print(ioe) sys.exit() # grab the metadata values", "(panos / panorama) myconfigpath = os.path.abspath(os.path.join('..', 'loadable_configs')) if os.path.isdir(myconfigpath) is", "and Panorama Edit the config_variables.yaml values and then run the", "OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION", "in the configurations ''' return sha512_crypt.hash(txt) def replace_variables(config_type, render_type, input_var):", "sys.exit() # grab the metadata values and convert to key-based", "def sha512_hash(txt): ''' Returns the SHA512 Hashed secret for use", "configuration; no default admin/admin used input_var['ADMINISTRATOR_USERNAME'] = input('Enter the superuser", "defined_filters = ['md5_hash', 'des_hash', 'sha512_hash'] def myconfig_newdir(myconfigdir_name, foldertime): ''' create", "= 'loadable_config_vars/config_variables.yaml' if os.path.isfile('{0}/{1}'.format(myconfigdir, var_file)) is False: vfilesrc = var_file", "config_type, filename), 'w') as configfile: configfile.write(element) # copy the variables", "print('Welcome to Iron-Skillet'.center(80)) print(' ') print('=' * 80) input_var =", "the jinja template render context = create_context(config_variables) # update context", "template_path = os.path.abspath(os.path.join('..', 'templates', config_type)) # append to the sys", "name :return: the myconfigdir full path name ''' # get", "input('Enter the name of the output directory: ') # this", "directory we want (panos / panorama) template_path = os.path.abspath(os.path.join('..', 'templates',", "create_loadable_configs.py Provides rendering of configuration templates with user defined values", "full path to the config directory 
we want (panos /", "template_save(snippet_name, myconfigdir, config_type, element): ''' after rendering the template save", "used for the render into the my_template folder var_file =", "Author: <NAME> <<EMAIL>> ''' Palo Alto Networks create_loadable_configs.py Provides rendering", ":param snippet_name: name of the output file :param myconfigdir: path", "hashed :return: password hash of the string with salt and", "RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN", "+ datetime :param snippet_name: name of the output file :param", "phash field in the configurations ''' return md5_crypt.hash(txt) def des_hash(txt):", "SHA512 Hashed secret for use as a password hash in", "respective template folders for config_type in ['panos', 'panorama']: for render_type", "BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL", "IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR #", "snippet_name: name of the output file :param myconfigdir: path to", "passwordmatch = <PASSWORD> else: print('\\nPasswords do not match. Please try", "# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES", "AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,", "render_type))) # load our custom jinja filters here, see the", "to the sys path for module lookup sys.path.append(template_path) # output", "and can be found in {0}'.format(myconfig_path)) print('along with the metadata", "create a new one # then create snippets and full", "'loadable_config_vars/config_variables.yaml' if os.path.isfile('{0}/{1}'.format(myconfigdir, var_file)) is False: vfilesrc = var_file vfiledst", "files; no default admin/admin while passwordmatch is False: password1 =", "os.mkdir(myconfigdir, mode=0o755) print('\\ncreated new archive folder {0}-{1}'.format(myconfigdir_name, foldertime)) if os.path.isdir('{0}/{1}'.format(myconfigdir,", "of the string with salt and configuration information. 
Suitable to", "var_file) shutil.copy(vfilesrc, vfiledst) return # define functions for custom jinja", "AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE", "os.path.abspath(os.path.join('..', 'loadable_configs')) if os.path.isdir(myconfigpath) is False: os.mkdir(myconfigpath, mode=0o755) print('created new", "be success code) ''' print('..saving template for {0}'.format(snippet_name)) filename =", "the configurations ''' return md5_crypt.hash(txt) def des_hash(txt): ''' Returns the", "get the full path to the output directory we want", "variables file used for the render into the my_template folder", "os.path.isfile('{0}/{1}'.format(myconfigdir, var_file)) is False: vfilesrc = var_file vfiledst = '{0}/{1}'.format(myconfigdir,", "MD5 Hashed secret for use as a password hash in", "inputs are read from the template directory and output to", "the metadata values used to render the configs\\n') return if", "input_var['ADMINISTRATOR_USERNAME'])) passwordmatch = False # prompt for the superuser password", "be added into the configuration; no default admin/admin used input_var['ADMINISTRATOR_USERNAME']", "myconfigpath = os.path.abspath(os.path.join('..', 'loadable_configs')) if os.path.isdir(myconfigpath) is False: os.mkdir(myconfigpath, mode=0o755)", "oyaml.safe_load(var_metadata.read()) except IOError as ioe: print(f'Could not open metadata file", "SHALL THE AUTHOR BE LIABLE FOR # ANY SPECIAL, DIRECT,", "if full or set commands; aligns with folder name :param", "env.filters['md5_hash'] = md5_hash env.filters['des_hash'] = des_hash env.filters['sha512_hash'] = sha512_hash template", "the variables file used for the render into the my_template", "jinja filters def md5_hash(txt): ''' Returns the MD5 Hashed secret", "config types that have their respective template folders for config_type", "Copyright (c) 2018, Palo Alto Networks # # Permission to", "rendered_template def template_save(snippet_name, myconfigdir, config_type, element): ''' after rendering the", "output 
directory we want (panos / panorama) template_path = os.path.abspath(os.path.join('..',", "INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING", "name of the template file :param template_path: path for the", "AUTHOR BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR", "run; to be used as suffix of folder name :return:", "of loadable full configurations and set commands for Panos and", "to verify: \") if password1 == password2: input_var['ADMINISTRATOR_PASSWORD'] = <PASSWORD>", "in loadable_configs dir myconfig_path = myconfig_newdir(input_var['output_dir'], input_var['archive_time']) # render full", "# append to the sys path for module lookup sys.path.append(template_path)", "= ['md5_hash', 'des_hash', 'sha512_hash'] def myconfig_newdir(myconfigdir_name, foldertime): ''' create a", "sha512_crypt.hash(txt) def replace_variables(config_type, render_type, input_var): ''' get the input variables", "foldertime)) if os.path.isdir('{0}/{1}'.format(myconfigdir, config_type)) is False: os.mkdir('{0}/{1}'.format(myconfigdir, config_type)) print('created new", "= <PASSWORD> passwordmatch = <PASSWORD> else: print('\\nPasswords do not match.", "render full and set conf files print('\\nworking with {0} config", "TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY", "store in the my_config files; no default admin/admin while passwordmatch", ":param render_type: type eg. 
if full or snippets; aligns with", "new subdirectories for configs :param myconfigdir_name: prefix folder name from", "panorama) myconfigpath = os.path.abspath(os.path.join('..', 'loadable_configs')) if os.path.isdir(myconfigpath) is False: os.mkdir(myconfigpath,", "jinja template render context = create_context(config_variables) # update context dict", "that the above # copyright notice and this permission notice", "context): ''' render the jinja template using the context value", "and render the output configs with jinja2 inputs are read", "if password1 == password2: input_var['ADMINISTRATOR_PASSWORD'] = <PASSWORD> passwordmatch = <PASSWORD>", "myconfigdir_name: prefix folder name from the my_variables.py file :param foldertime:", "new subdirectories for {0}'.format(config_type)) return myconfigdir def create_context(config_var_file): # read", "to render the configs\\n') return if __name__ == '__main__': #", "input_var[snippet_var] # get the full path to the output directory", "be found in {0}'.format(myconfig_path)) print('along with the metadata values used", "rendering of configuration templates with user defined values Output is", "distribute this software for any # purpose with or without", "and set conf files print('\\nworking with {0} config template'.format(render_type)) if", "print('=' * 80) input_var = {} # archive_time used as", "output directory input_var['output_dir'] = input('Enter the name of the output", "or snippets; aligns with folder name :return: no value returned", "get the full path to the config directory we want", "name of the output file :param myconfigdir: path to the", "copy the variables file used for the render into the", "of the output directory: ') # this prompts for the", "= snippet_var['value'] return jinja_context def template_render(filename, template_path, render_type, context): '''", "'__main__': # Use the timestamp to create a unique folder", "when script run; to be used as suffix of folder", "''' render the jinja template 
using the context value from", "the configurations ''' return sha512_crypt.hash(txt) def replace_variables(config_type, render_type, input_var): '''", ":return: the myconfigdir full path name ''' # get the", "again to verify: \") if password1 == password2: input_var['ADMINISTRATOR_PASSWORD'] =", "'w') as configfile: configfile.write(element) # copy the variables file used", "import des_crypt from passlib.hash import md5_crypt from passlib.hash import sha256_crypt", "{0}'.format(config_type)) return myconfigdir def create_context(config_var_file): # read the metafile to", "= input('Enter the superuser administrator account username: ') print('\\na phash", "snippet_name with open('{0}/{1}/{2}'.format(myconfigdir, config_type, filename), 'w') as configfile: configfile.write(element) #", "ARISING OUT OF # OR IN CONNECTION WITH THE USE", "'loadable_configs')) if os.path.isdir(myconfigpath) is False: os.mkdir(myconfigpath, mode=0o755) print('created new loadable", "<gh_stars>0 # Copyright (c) 2018, Palo Alto Networks # #", "myconfig directory each run saves with a unique prefix name", "string with salt and configuration information. 
Suitable to place in", "the SHA512 Hashed secret for use as a password hash", "the output directory we want (panos / panorama) template_path =", "is False: os.mkdir(myconfigdir, mode=0o755) print('\\ncreated new archive folder {0}-{1}'.format(myconfigdir_name, foldertime))", "foldertime) if os.path.isdir(myconfigdir) is False: os.mkdir(myconfigdir, mode=0o755) print('\\ncreated new archive", "in the phash field in the configurations ''' return sha256_crypt.hash(txt)", "context) template_save(filename, myconfig_path, config_type, element) print('\\nconfigs have been created and", "create snippets and full sub-directories myconfigdir = '{0}/{1}-{2}'.format(myconfigpath, myconfigdir_name, foldertime)", "for the superuser username to be added into the configuration;", "as var_metadata: variables = oyaml.safe_load(var_metadata.read()) except IOError as ioe: print(f'Could", "then new subdirectories for configs :param myconfigdir_name: prefix folder name", "any # purpose with or without fee is hereby granted,", "OF THIS SOFTWARE. # Author: <NAME> <<EMAIL>> ''' Palo Alto", "= des_hash env.filters['sha512_hash'] = sha512_hash template = env.get_template(filename) rendered_template =", "def template_save(snippet_name, myconfigdir, config_type, element): ''' after rendering the template", "# grab the metadata values and convert to key-based dictionary", "not match. 
Please try again.\\n') # loop through all config", "save to the myconfig directory each run saves with a", "the superuser administrator account username: ') print('\\na phash will be", "of the template file :param template_path: path for the template", "a new main loadable_configs folder if required then new subdirectories", "return des_crypt.hash(txt) def sha256_hash(txt): ''' Returns the SHA256 Hashed secret", "in the configurations ''' return md5_crypt.hash(txt) def des_hash(txt): ''' Returns", "template_path, render_type, context): ''' render the jinja template using the", "folder name :return: the myconfigdir full path name ''' #", "sha512_hash(txt): ''' Returns the SHA512 Hashed secret for use as", "directory each run saves with a unique prefix name +", "account password: \") password2 = getpass.getpass(\"Enter password again to verify:", "variables from user input for snippet_var in input_var: context[snippet_var] =", "# define functions for custom jinja filters def md5_hash(txt): '''", "directory and output to my_config :param config_type: panos or panorama", "the render into the my_template folder var_file = 'loadable_config_vars/config_variables.yaml' if", "SOFTWARE. 
# Author: <NAME> <<EMAIL>> ''' Palo Alto Networks create_loadable_configs.py", "if render_type == 'full': filename = 'iron_skillet_{0}_full.xml'.format(config_type) if render_type ==", "to the respective directories :param archivetime: datetimestamp used for the", "print(f'Could not open metadata file {config_var_file}') print(ioe) sys.exit() # grab", "config_type)) # append to the sys path for module lookup", "the superuser administrator account password: \") password2 = getpass.getpass(\"Enter password", "# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF", "getpass.getpass(\"Enter password again to verify: \") if password1 == password2:", "print(ioe) sys.exit() # grab the metadata values and convert to", "a unique prefix name + datetime :param snippet_name: name of", "my_config directory :param config_type: based on initial run list; eg.", "Please try again.\\n') # loop through all config types that", "and set conf file ''' print('..creating template for {0}'.format(filename)) env", "False: vfilesrc = var_file vfiledst = '{0}/{1}'.format(myconfigdir, var_file) shutil.copy(vfilesrc, vfiledst)", "archive folder {0}-{1}'.format(myconfigdir_name, foldertime)) if os.path.isdir('{0}/{1}'.format(myconfigdir, config_type)) is False: os.mkdir('{0}/{1}'.format(myconfigdir,", "'config_variables.yaml' # create dict of values for the jinja template", "output subdir located in loadable_configs dir myconfig_path = myconfig_newdir(input_var['output_dir'], input_var['archive_time'])", "loadable_configs folder if required then new subdirectories for configs :param", "input_var): ''' get the input variables and render the output", "input_var['archive_time'] = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%d_%H%M%S') print('\\ndatetime used for folder creation: {0}\\n'.format(input_var['archive_time'])) #", "prefix name + datetime :param snippet_name: name of the output", "my_config files; no default admin/admin while passwordmatch is False: password1", 
"{0}\\n'.format(input_var['archive_time'])) # this prompts for the prefix name of the", "os import shutil import sys import time import getpass import", "return myconfigdir def create_context(config_var_file): # read the metafile to get", "of the output directory input_var['output_dir'] = input('Enter the name of", "and if not create a new one # then create", "os.path.abspath(os.path.join('..', 'templates', config_type)) # append to the sys path for", "and set commands for Panos and Panorama Edit the config_variables.yaml", "full path to the output directory we want (panos /", "exists and if not create a new one # then", "passlib.hash import des_crypt from passlib.hash import md5_crypt from passlib.hash import", "types that have their respective template folders for config_type in", "and output to my_config :param config_type: panos or panorama to", "PERFORMANCE OF THIS SOFTWARE. # Author: <NAME> <<EMAIL>> ''' Palo", "render_type, context): ''' render the jinja template using the context", "folder name :param render_type: type eg. 
if full or snippets;", "to be used as suffix of folder name :return: the", "the configs\\n') return if __name__ == '__main__': # Use the", "sub-directories myconfigdir = '{0}/{1}-{2}'.format(myconfigpath, myconfigdir_name, foldertime) if os.path.isdir(myconfigdir) is False:", "create a new main loadable_configs folder if required then new", "render_type == 'full': filename = 'iron_skillet_{0}_full.xml'.format(config_type) if render_type == 'set_commands':", "functions for custom jinja filters def md5_hash(txt): ''' Returns the", "panorama) template_path = os.path.abspath(os.path.join('..', 'templates', config_type)) # append to the", "* 80) print(' ') print('Welcome to Iron-Skillet'.center(80)) print(' ') print('='", "to be hashed :return: password hash of the string with", "os.mkdir(myconfigpath, mode=0o755) print('created new loadable config directory') # check that", "here, see the function defs below for reference env.filters['md5_hash'] =", "FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR", "# read the metafile to get variables and values try:", "for use as a password hash in the PanOS configuration", "input variables and render the output configs with jinja2 inputs", "80) print(' ') print('Welcome to Iron-Skillet'.center(80)) print(' ') print('=' *", "name of the output directory input_var['output_dir'] = input('Enter the name", "import sha512_crypt defined_filters = ['md5_hash', 'des_hash', 'sha512_hash'] def myconfig_newdir(myconfigdir_name, foldertime):", "path to the config directory we want (panos / panorama)", "try: with open(config_var_file, 'r') as var_metadata: variables = oyaml.safe_load(var_metadata.read()) except", "var_file vfiledst = '{0}/{1}'.format(myconfigdir, var_file) shutil.copy(vfilesrc, vfiledst) return # define", "field in the configurations ''' return sha256_crypt.hash(txt) def sha512_hash(txt): '''", "passwordmatch = False # prompt for the superuser password to", "if os.path.isfile('{0}/{1}'.format(myconfigdir, 
var_file)) is False: vfilesrc = var_file vfiledst =", "mode=0o755) print('\\ncreated new archive folder {0}-{1}'.format(myconfigdir_name, foldertime)) if os.path.isdir('{0}/{1}'.format(myconfigdir, config_type))", "IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.", "for {0}'.format(config_type)) return myconfigdir def create_context(config_var_file): # read the metafile", "{config_var_file}') print(ioe) sys.exit() # grab the metadata values and convert", "to the my_config directory :param config_type: based on initial run", ":param context: dict of variables to render :return: return the", ":param foldertime: datetime when script run; to be used as", "== '__main__': # Use the timestamp to create a unique", "os.path.isdir(myconfigpath) is False: os.mkdir(myconfigpath, mode=0o755) print('created new loadable config directory')", "mode=0o755) print('created new loadable config directory') # check that configs", "and values try: with open(config_var_file, 'r') as var_metadata: variables =", "the phash field in the configurations ''' return md5_crypt.hash(txt) def", "dict of variables to render :return: return the rendered xml", "value from config_variables.yaml :param filename: name of the template file", "template = env.get_template(filename) rendered_template = template.render(context) return rendered_template def template_save(snippet_name,", "permission notice appear in all copies. # # THE SOFTWARE", "context: dict of variables to render :return: return the rendered", "in all copies. # # THE SOFTWARE IS PROVIDED \"AS", "xml element rendered based on input variables; used as folder", "Returns the SHA256 Hashed secret for use as a password", "match. 
Please try again.\\n') # loop through all config types", "the jinja template using the context value from config_variables.yaml :param", "passlib.hash import md5_crypt from passlib.hash import sha256_crypt from passlib.hash import", "with open('{0}/{1}/{2}'.format(myconfigdir, config_type, filename), 'w') as configfile: configfile.write(element) # copy", "metadata values used to render the configs\\n') return if __name__", "of values for the jinja template render context = create_context(config_variables)", "'sha512_hash'] def myconfig_newdir(myconfigdir_name, foldertime): ''' create a new main loadable_configs", "if not create a new one # then create snippets", "= oyaml.safe_load(var_metadata.read()) except IOError as ioe: print(f'Could not open metadata", "reference env.filters['md5_hash'] = md5_hash env.filters['des_hash'] = des_hash env.filters['sha512_hash'] = sha512_hash", "to the output directory we want (panos / panorama) template_path", "file :param template_path: path for the template file :param render_type:", "are read from the template directory and output to my_config", "append to the sys path for module lookup sys.path.append(template_path) #", "naming ''' config_variables = 'config_variables.yaml' # create dict of values", "<PASSWORD> passwordmatch = <PASSWORD> else: print('\\nPasswords do not match. 
Please", "grab the metadata values and convert to key-based dictionary jinja_context", "use as a password hash in the PanOS configuration :param", "# Author: <NAME> <<EMAIL>> ''' Palo Alto Networks create_loadable_configs.py Provides", "the metafile to get variables and values try: with open(config_var_file,", "for the output my_config folder naming ''' config_variables = 'config_variables.yaml'", "render context = create_context(config_variables) # update context dict with variables", "archive_time used as part of the my_config directory name input_var['archive_time']", "var_file = 'loadable_config_vars/config_variables.yaml' if os.path.isfile('{0}/{1}'.format(myconfigdir, var_file)) is False: vfilesrc =", "or without fee is hereby granted, provided that the above", "des_hash(txt): ''' Returns the DES Hashed secret for use as", "be created for superuser {0} and added to the config", "getpass import oyaml from jinja2 import Environment, FileSystemLoader from passlib.hash", "WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER", "context dict with variables from user input for snippet_var in", "config_variables.yaml :param filename: name of the template file :param template_path:", "# create dict of values for the jinja template render", "context[snippet_var] = input_var[snippet_var] # get the full path to the", "provided without support, warranty, or guarantee. Use at your own", "folder name :return: no value returned (future could be success", "OR PERFORMANCE OF THIS SOFTWARE. # Author: <NAME> <<EMAIL>> '''", "filename), 'w') as configfile: configfile.write(element) # copy the variables file", "import shutil import sys import time import getpass import oyaml", "new main loadable_configs folder if required then new subdirectories for", "filename: name of the template file :param template_path: path for", "render_type == 'set_commands': filename = 'iron_skillet_{0}_full.conf'.format(config_type) element = template_render(filename, template_path,", "information. 
Suitable to place in the phash field in the", "filename = snippet_name with open('{0}/{1}/{2}'.format(myconfigdir, config_type, filename), 'w') as configfile:", "my_config folder naming ''' config_variables = 'config_variables.yaml' # create dict", "in the configurations ''' return des_crypt.hash(txt) def sha256_hash(txt): ''' Returns", "for the template file :param render_type: type if full or", "render_type, context) template_save(filename, myconfig_path, config_type, element) print('\\nconfigs have been created", "from the template directory and output to my_config :param config_type:", "loadable config directory') # check that configs folder exists and", "granted, provided that the above # copyright notice and this" ]
[ "this config, so we store it here report.verbosity = item.config.option.verbose", "pytest_addoption(parser): group = parser.getgroup(\"pact specific options (pactman)\") group.addoption( \"--pact-files\", default=None,", "metafunc.config.getoption(\"pact_files\") if not pact_files_location: raise ValueError(\"need a --pact-broker-url or --pact-files", "of provider being verified\" ) group.addoption( \"--pact-consumer-name\", default=None, help=\"consumer name", "> 0: out.line(\"Traceback:\", bold=True) return super().toterminal(out) else: out.line(\"Traceback not shown,", "interaction, consumer): self.publish_results = publish_results self.provider_version = provider_version self.interaction =", "consumer.interactions[-1] for interaction in consumer.interactions: if interaction is last: yield", "removed in the 3.0.0 release.\", DeprecationWarning, ) if filter_consumer_name: pacts", "but since it's not *expected* to fail an \"f\" is", "running verbose def pytest_report_header(config): if config.getoption(\"verbose\") > 0: location =", "extra_provider_headers) except (Failed, AssertionError) as e: raise Failed(str(e)) from None", "pact verification call interaction = item.funcargs[\"pact_verifier\"].interaction report = PactTestReport.from_item_and_call(item, call,", "custom TestReport subclass if we're reporting on a pact verification", "logging import os import warnings import pytest from _pytest.outcomes import", "action=\"store_true\", default=False, help=\"report pact verification results to pact broker\", )", "= \"passed\" return report def pytest_report_teststatus(report, config): if not hasattr(report,", "default=\"\", help=\"pact broker URL\") group.addoption(\"--pact-broker-token\", default=\"\", help=\"pact broker bearer token\")", "--pact-consumer-name command-line option is deprecated \" \"and will be removed", "to - \" \"DEPRECATED, use --pact-verify-consumer instead\", ) group.addoption( \"--pact-verify-consumer\",", "if running verbose def 
pytest_report_header(config): if config.getoption(\"verbose\") > 0: location", "None def finish(self): if self.consumer and self.publish_results and self.provider_version: self.consumer.publish_result(self.provider_version)", "--pact-verify-consumer instead\", ) group.addoption( \"--pact-verify-consumer\", default=None, help=\"consumer name to limit", "request.param p = PytestPactVerifier( pytestconfig.getoption(\"pact_publish_results\"), pytestconfig.getoption(\"pact_provider_version\"), interaction, consumer, ) yield", "is last: yield (interaction, consumer) else: yield (interaction, None) def", "'git-tag' or 'git-hash'\") # group.addoption(\"--pact-consumer-version-tag\", metavar='TAG', action=\"append\", # help=\"tag(s) that", "indirect=True) class PactTestReport(TestReport): \"\"\"Custom TestReport that allows us to attach", "else: out.line(\"Traceback not shown, use pytest -v to show it\")", "BrokerPact, BrokerPacts, PactBrokerConfig from .result import PytestResult, log def pytest_addoption(parser):", "Failed from _pytest.reports import TestReport from .broker_pact import BrokerPact, BrokerPacts,", "= broker_pacts.consumers() filter_consumer_name = metafunc.config.getoption(\"pact_verify_consumer\") if not filter_consumer_name: filter_consumer_name =", "in metafunc.fixturenames: broker_url = get_broker_url(metafunc.config) if not broker_url: pact_files_location =", "[]): return # use our custom TestReport subclass if we're", "# help=\"tag(s) that should be applied to the consumer version", "default=\"\", help=\"pact broker bearer token\") group.addoption( \"--pact-provider-name\", default=None, help=\"pact name", "if config.getoption(\"verbose\") > 0: location = get_broker_url(config) or config.getoption(\"pact_files\") return", "def pytest_addoption(parser): group = parser.getgroup(\"pact specific options (pactman)\") group.addoption( \"--pact-files\",", "result_factory=PytestResult) def test_id(identifier): interaction, _ = identifier return 
str(interaction) def", "show it\") def pytest_runtest_makereport(item, call): if call.when != \"call\" or", "pytest_report_header(config): if config.getoption(\"verbose\") > 0: location = get_broker_url(config) or config.getoption(\"pact_files\")", "!= \"call\" or \"pact_verifier\" not in getattr(item, \"fixturenames\", []): return", "reasonably get at this config, so we store it here", "pytest -v to show it\") def pytest_runtest_makereport(item, call): if call.when", "yield BrokerPact.load_file(filename, result_factory=PytestResult) def test_id(identifier): interaction, _ = identifier return", "fail, which allows the run to pass report.wasxfail = True", "into an \"expected\" fail, which allows the run to pass", "is a little clearer return \"ignore fail\", \"f\", \"IGNORE_FAIL\" @pytest.fixture()", "or os.environ.get(\"PACT_BROKER_URL\") def get_provider_name(config): return config.getoption(\"pact_provider_name\") or os.environ.get(\"PACT_PROVIDER_NAME\") # add", "\"ignore fail\", \"f\", \"IGNORE_FAIL\" @pytest.fixture() def pact_verifier(pytestconfig, request): interaction, consumer", "report def pytest_report_teststatus(report, config): if not hasattr(report, \"pact_interaction\"): return if", "failure details:\", bold=True) for text, kw in self.pact_interaction.result.results_for_terminal(): out.line(text, **kw)", "if hasattr(report, \"wasxfail\"): # wasxfail usually displays an \"X\" but", "Future options to be implemented. 
Listing them here so naming", "\"IGNORE_FAIL\" @pytest.fixture() def pact_verifier(pytestconfig, request): interaction, consumer = request.param p", "pytest output if running verbose def pytest_report_header(config): if config.getoption(\"verbose\") >", "to pact broker\") # group.addoption(\"--pact-consumer-version\", default=None, # help=\"consumer version to", "default=None, help=\"provider version to use when reporting pact results to", "pacts if pact.consumer == filter_consumer_name] metafunc.parametrize(\"pact_verifier\", flatten_pacts(pacts), ids=test_id, indirect=True) class", "verification result ouput as well as the traceback of the", "pytest_report_teststatus(report, config): if not hasattr(report, \"pact_interaction\"): return if hasattr(report, \"wasxfail\"):", "as well as the traceback of the failure. \"\"\" @classmethod", "the 3.0.0 release.\", DeprecationWarning, ) if filter_consumer_name: pacts = [pact", "import TestReport from .broker_pact import BrokerPact, BrokerPacts, PactBrokerConfig from .result", "pass report.wasxfail = True report.outcome = \"passed\" return report def", "\"--pact-provider-name\", default=None, help=\"pact name of provider being verified\" ) group.addoption(", "if self.verbosity > 0: out.line(\"Traceback:\", bold=True) return super().toterminal(out) else: out.line(\"Traceback", "\"--pact-verify-consumer\", default=None, help=\"consumer name to limit pact verification to\" )", "if self.consumer and self.publish_results and self.provider_version: self.consumer.publish_result(self.provider_version) def flatten_pacts(pacts): for", "shown, use pytest -v to show it\") def pytest_runtest_makereport(item, call):", "report.outcome = \"passed\" return report def pytest_report_teststatus(report, config): if not", "tag. 
May be \" \"specified multiple times in which case", "use pytest -v to show it\") def pytest_runtest_makereport(item, call): if", "self.publish_results = publish_results self.provider_version = provider_version self.interaction = interaction self.consumer", "group.addoption(\"--pact-consumer-version-tag\", metavar='TAG', action=\"append\", # help=\"tag(s) that should be applied to", "--pact-broker-url or --pact-files option\") pact_files = load_pact_files(pact_files_location) metafunc.parametrize( \"pact_verifier\", flatten_pacts(pact_files),", "report.pact_interaction = interaction # the toterminal() call can't reasonably get", "interaction # the toterminal() call can't reasonably get at this", "= PactTestReport.from_item_and_call(item, call, interaction) if report.failed and item.config.getoption(\"pact_allow_fail\"): # convert", ") group.addoption( \"--pact-verify-consumer\", default=None, help=\"consumer name to limit pact verification", "group = parser.getgroup(\"pact specific options (pactman)\") group.addoption( \"--pact-files\", default=None, help=\"pact", "= [pact for pact in pacts if pact.consumer == filter_consumer_name]", "super().toterminal(out) else: out.line(\"Traceback not shown, use pytest -v to show", "\"tags will be verified.\", ) group.addoption( \"--pact-publish-results\", action=\"store_true\", default=False, help=\"report", "from .broker_pact import BrokerPact, BrokerPacts, PactBrokerConfig from .result import PytestResult,", "the failure. 
\"\"\" @classmethod def from_item_and_call(cls, item, call, interaction): report", "fail into an \"expected\" fail, which allows the run to", "\"and will be removed in the 3.0.0 release.\", DeprecationWarning, )", "help=\"report pact verification results to pact broker\", ) group.addoption( \"--pact-provider-version\",", "use our custom TestReport subclass if we're reporting on a", "super().from_item_and_call(item, call) report.pact_interaction = interaction # the toterminal() call can't", "\"expected\" fail, which allows the run to pass report.wasxfail =", "those matching the tag. May be \" \"specified multiple times", "interaction) if report.failed and item.config.getoption(\"pact_allow_fail\"): # convert the fail into", "to the pytest output if running verbose def pytest_report_header(config): if", "_pytest.reports import TestReport from .broker_pact import BrokerPact, BrokerPacts, PactBrokerConfig from", "= True report.outcome = \"passed\" return report def pytest_report_teststatus(report, config):", "(Failed, AssertionError) as e: raise Failed(str(e)) from None def finish(self):", "if verbosity > 0: log.setLevel(logging.DEBUG) class PytestPactVerifier: def __init__(self, publish_results,", "def pact_verifier(pytestconfig, request): interaction, consumer = request.param p = PytestPactVerifier(", "if not pact_files_location: raise ValueError(\"need a --pact-broker-url or --pact-files option\")", "results to pact broker\", ) group.addoption( \"--pact-provider-version\", default=None, help=\"provider version", "that should be applied to the consumer version when pacts", "--pact-provider-name option\") broker = PactBrokerConfig( broker_url, metafunc.config.getoption(\"pact_broker_token\"), metafunc.config.getoption(\"pact_verify_consumer_tag\", []), )", "ids=test_id, indirect=True) class PactTestReport(TestReport): \"\"\"Custom TestReport that allows us to", "from _pytest.reports import TestReport from .broker_pact import BrokerPact, BrokerPacts, PactBrokerConfig", 
"to pact broker\", ) group.addoption( \"--pact-allow-fail\", default=False, action=\"store_true\", help=\"do not", "consistency can be a thing. # group.addoption(\"--pact-publish-pacts\", action=\"store_true\", default=False, #", "source 'git-tag' or 'git-hash'\") # group.addoption(\"--pact-consumer-version-tag\", metavar='TAG', action=\"append\", # help=\"tag(s)", "None) def load_pact_files(file_location): for filename in glob.glob(file_location, recursive=True): yield BrokerPact.load_file(filename,", "the result, and then display the interaction's verification result ouput", "report.verbosity = item.config.option.verbose return report def toterminal(self, out): out.line(\"Pact failure", "multiple tags may be supplied\") def get_broker_url(config): return config.getoption(\"pact_broker_url\") or", "metavar=\"TAG\", action=\"append\", help=\"limit broker pacts verified to those matching the", "help=\"publish pacts to pact broker\") # group.addoption(\"--pact-consumer-version\", default=None, # help=\"consumer", "= publish_results self.provider_version = provider_version self.interaction = interaction self.consumer =", "the interaction's verification result ouput as well as the traceback", ") pacts = broker_pacts.consumers() filter_consumer_name = metafunc.config.getoption(\"pact_verify_consumer\") if not filter_consumer_name:", "group.addoption(\"--pact-broker-token\", default=\"\", help=\"pact broker bearer token\") group.addoption( \"--pact-provider-name\", default=None, help=\"pact", "default=False, action=\"store_true\", help=\"do not fail the pytest run if any", "os.environ.get(\"PACT_BROKER_URL\") def get_provider_name(config): return config.getoption(\"pact_provider_name\") or os.environ.get(\"PACT_PROVIDER_NAME\") # add the", "use when publishing pacts to the broker\") # group.addoption(\"--pact-consumer-version-source\", default=None,", "return super().toterminal(out) else: out.line(\"Traceback not shown, use pytest -v to", 
"PactTestReport.from_item_and_call(item, call, interaction) if report.failed and item.config.getoption(\"pact_allow_fail\"): # convert the", "usually displays an \"X\" but since it's not *expected* to", "config.getoption(\"pact_files\") return [f\"Loading pacts from {location}\"] def pytest_configure(config): logging.getLogger(\"pactman\").handlers =", "import BrokerPact, BrokerPacts, PactBrokerConfig from .result import PytestResult, log def", "pact.consumer == filter_consumer_name] metafunc.parametrize(\"pact_verifier\", flatten_pacts(pacts), ids=test_id, indirect=True) class PactTestReport(TestReport): \"\"\"Custom", "at this config, so we store it here report.verbosity =", "\" \"tags will be verified.\", ) group.addoption( \"--pact-publish-results\", action=\"store_true\", default=False,", "help=\"tag(s) that should be applied to the consumer version when", "group.addoption( \"--pact-verify-consumer\", default=None, help=\"consumer name to limit pact verification to\"", "\" \"DEPRECATED, use --pact-verify-consumer instead\", ) group.addoption( \"--pact-verify-consumer\", default=None, help=\"consumer", "= provider_version self.interaction = interaction self.consumer = consumer def verify(self,", "if filter_consumer_name: warnings.warn( \"The --pact-consumer-name command-line option is deprecated \"", "to limit pact verification to - \" \"DEPRECATED, use --pact-verify-consumer", "pacts = [pact for pact in pacts if pact.consumer ==", "import glob import logging import os import warnings import pytest", "consumer version when pacts \" # \"are uploaded to the", "ValueError(\"need a --pact-broker-url or --pact-files option\") pact_files = load_pact_files(pact_files_location) metafunc.parametrize(", "PactBrokerConfig from .result import PytestResult, log def pytest_addoption(parser): group =", "return \"ignore fail\", \"f\", \"IGNORE_FAIL\" @pytest.fixture() def pact_verifier(pytestconfig, request): interaction,", "= BrokerPacts( provider_name, pact_broker=broker, 
result_factory=PytestResult ) pacts = broker_pacts.consumers() filter_consumer_name", "clearer return \"ignore fail\", \"f\", \"IGNORE_FAIL\" @pytest.fixture() def pact_verifier(pytestconfig, request):", "\"pact_verifier\", flatten_pacts(pact_files), ids=test_id, indirect=True ) else: provider_name = get_provider_name(metafunc.config) if", "last: yield (interaction, consumer) else: yield (interaction, None) def load_pact_files(file_location):", "call.when != \"call\" or \"pact_verifier\" not in getattr(item, \"fixturenames\", []):", "pact verification to\" ) group.addoption( \"--pact-verify-consumer-tag\", metavar=\"TAG\", action=\"append\", help=\"limit broker", "of the failure. \"\"\" @classmethod def from_item_and_call(cls, item, call, interaction):", "(wildcards allowed)\" ) group.addoption(\"--pact-broker-url\", default=\"\", help=\"pact broker URL\") group.addoption(\"--pact-broker-token\", default=\"\",", "def pytest_configure(config): logging.getLogger(\"pactman\").handlers = [] logging.basicConfig(format=\"%(message)s\") verbosity = config.getoption(\"verbose\") if", "call can't reasonably get at this config, so we store", "# Future options to be implemented. 
Listing them here so", "if filter_consumer_name: pacts = [pact for pact in pacts if", "consumer.interactions: if interaction is last: yield (interaction, consumer) else: yield", "help=\"pact broker URL\") group.addoption(\"--pact-broker-token\", default=\"\", help=\"pact broker bearer token\") group.addoption(", "metafunc.config.getoption(\"pact_broker_token\"), metafunc.config.getoption(\"pact_verify_consumer_tag\", []), ) broker_pacts = BrokerPacts( provider_name, pact_broker=broker, result_factory=PytestResult", "when reporting pact results to pact broker\", ) group.addoption( \"--pact-allow-fail\",", "or config.getoption(\"pact_files\") return [f\"Loading pacts from {location}\"] def pytest_configure(config): logging.getLogger(\"pactman\").handlers", "self.consumer.publish_result(self.provider_version) def flatten_pacts(pacts): for consumer in pacts: last = consumer.interactions[-1]", "== filter_consumer_name] metafunc.parametrize(\"pact_verifier\", flatten_pacts(pacts), ids=test_id, indirect=True) class PactTestReport(TestReport): \"\"\"Custom TestReport", "not in getattr(item, \"fixturenames\", []): return # use our custom", "our custom TestReport subclass if we're reporting on a pact", "not hasattr(report, \"pact_interaction\"): return if hasattr(report, \"wasxfail\"): # wasxfail usually", "hasattr(report, \"wasxfail\"): # wasxfail usually displays an \"X\" but since", "bold=True) return super().toterminal(out) else: out.line(\"Traceback not shown, use pytest -v", "if not filter_consumer_name: filter_consumer_name = metafunc.config.getoption(\"pact_consumer_name\") if filter_consumer_name: warnings.warn( \"The", "and then display the interaction's verification result ouput as well", "to\" ) group.addoption( \"--pact-verify-consumer-tag\", metavar=\"TAG\", action=\"append\", help=\"limit broker pacts verified", "metafunc.fixturenames: broker_url = get_broker_url(metafunc.config) if not broker_url: pact_files_location = 
metafunc.config.getoption(\"pact_files\")", "the --pact-provider-name option\") broker = PactBrokerConfig( broker_url, metafunc.config.getoption(\"pact_broker_token\"), metafunc.config.getoption(\"pact_verify_consumer_tag\", []),", "provider_url, provider_setup, extra_provider_headers={}): try: self.interaction.verify_with_callable_setup(provider_url, provider_setup, extra_provider_headers) except (Failed, AssertionError)", "store it here report.verbosity = item.config.option.verbose return report def toterminal(self,", "the fail into an \"expected\" fail, which allows the run", ".result import PytestResult, log def pytest_addoption(parser): group = parser.getgroup(\"pact specific", "# group.addoption(\"--pact-consumer-version-source\", default=None, # help=\"generate consumer version from source 'git-tag'", "p = PytestPactVerifier( pytestconfig.getoption(\"pact_publish_results\"), pytestconfig.getoption(\"pact_provider_version\"), interaction, consumer, ) yield p", "= get_provider_name(metafunc.config) if not provider_name: raise ValueError(\"--pact-broker-url requires the --pact-provider-name", "= metafunc.config.getoption(\"pact_verify_consumer\") if not filter_consumer_name: filter_consumer_name = metafunc.config.getoption(\"pact_consumer_name\") if filter_consumer_name:", "= item.config.option.verbose return report def toterminal(self, out): out.line(\"Pact failure details:\",", "broker_url, metafunc.config.getoption(\"pact_broker_token\"), metafunc.config.getoption(\"pact_verify_consumer_tag\", []), ) broker_pacts = BrokerPacts( provider_name, pact_broker=broker,", "group.addoption( \"--pact-provider-version\", default=None, help=\"provider version to use when reporting pact", "publish_results, provider_version, interaction, consumer): self.publish_results = publish_results self.provider_version = provider_version", "in glob.glob(file_location, recursive=True): yield BrokerPact.load_file(filename, result_factory=PytestResult) def test_id(identifier): 
interaction, _", "token\") group.addoption( \"--pact-provider-name\", default=None, help=\"pact name of provider being verified\"", "# wasxfail usually displays an \"X\" but since it's not", "from None def finish(self): if self.consumer and self.publish_results and self.provider_version:", "class PactTestReport(TestReport): \"\"\"Custom TestReport that allows us to attach an", "= item.funcargs[\"pact_verifier\"].interaction report = PactTestReport.from_item_and_call(item, call, interaction) if report.failed and", "get_broker_url(metafunc.config) if not broker_url: pact_files_location = metafunc.config.getoption(\"pact_files\") if not pact_files_location:", "item.config.getoption(\"pact_allow_fail\"): # convert the fail into an \"expected\" fail, which", "help=\"consumer name to limit pact verification to\" ) group.addoption( \"--pact-verify-consumer-tag\",", "pact broker\", ) group.addoption( \"--pact-allow-fail\", default=False, action=\"store_true\", help=\"do not fail", ") else: provider_name = get_provider_name(metafunc.config) if not provider_name: raise ValueError(\"--pact-broker-url", "when pacts \" # \"are uploaded to the broker; multiple", "if interaction is last: yield (interaction, consumer) else: yield (interaction,", "well as the traceback of the failure. \"\"\" @classmethod def", "bearer token\") group.addoption( \"--pact-provider-name\", default=None, help=\"pact name of provider being", "return [f\"Loading pacts from {location}\"] def pytest_configure(config): logging.getLogger(\"pactman\").handlers = []", "pact verification results to pact broker\", ) group.addoption( \"--pact-provider-version\", default=None,", "be removed in the 3.0.0 release.\", DeprecationWarning, ) if filter_consumer_name:", "traceback of the failure. \"\"\" @classmethod def from_item_and_call(cls, item, call,", "verified to those matching the tag. 
May be \" \"specified", "get_provider_name(metafunc.config) if not provider_name: raise ValueError(\"--pact-broker-url requires the --pact-provider-name option\")", "e: raise Failed(str(e)) from None def finish(self): if self.consumer and", "self.pact_interaction.result.results_for_terminal(): out.line(text, **kw) if self.verbosity > 0: out.line(\"Traceback:\", bold=True) return", "[] logging.basicConfig(format=\"%(message)s\") verbosity = config.getoption(\"verbose\") if verbosity > 0: log.setLevel(logging.DEBUG)", "pacts to the broker\") # group.addoption(\"--pact-consumer-version-source\", default=None, # help=\"generate consumer", "\"are uploaded to the broker; multiple tags may be supplied\")", "broker\") # group.addoption(\"--pact-consumer-version-source\", default=None, # help=\"generate consumer version from source", "provider_version self.interaction = interaction self.consumer = consumer def verify(self, provider_url,", "glob.glob(file_location, recursive=True): yield BrokerPact.load_file(filename, result_factory=PytestResult) def test_id(identifier): interaction, _ =", "metavar='TAG', action=\"append\", # help=\"tag(s) that should be applied to the", "PytestPactVerifier: def __init__(self, publish_results, provider_version, interaction, consumer): self.publish_results = publish_results", "as e: raise Failed(str(e)) from None def finish(self): if self.consumer", "flatten_pacts(pacts): for consumer in pacts: last = consumer.interactions[-1] for interaction", "option is deprecated \" \"and will be removed in the", "consumer = request.param p = PytestPactVerifier( pytestconfig.getoption(\"pact_publish_results\"), pytestconfig.getoption(\"pact_provider_version\"), interaction, consumer,", "should be applied to the consumer version when pacts \"", "metafunc.config.getoption(\"pact_verify_consumer\") if not filter_consumer_name: filter_consumer_name = metafunc.config.getoption(\"pact_consumer_name\") if filter_consumer_name: warnings.warn(", 
"pytest_runtest_makereport(item, call): if call.when != \"call\" or \"pact_verifier\" not in", "flatten_pacts(pacts), ids=test_id, indirect=True) class PactTestReport(TestReport): \"\"\"Custom TestReport that allows us", "uploaded to the broker; multiple tags may be supplied\") def", "help=\"pact broker bearer token\") group.addoption( \"--pact-provider-name\", default=None, help=\"pact name of", "or 'git-hash'\") # group.addoption(\"--pact-consumer-version-tag\", metavar='TAG', action=\"append\", # help=\"tag(s) that should", "self.interaction.verify_with_callable_setup(provider_url, provider_setup, extra_provider_headers) except (Failed, AssertionError) as e: raise Failed(str(e))", "\" \"specified multiple times in which case pacts matching any", ") group.addoption( \"--pact-verify-consumer-tag\", metavar=\"TAG\", action=\"append\", help=\"limit broker pacts verified to", "verbose def pytest_report_header(config): if config.getoption(\"verbose\") > 0: location = get_broker_url(config)", "broker_pacts = BrokerPacts( provider_name, pact_broker=broker, result_factory=PytestResult ) pacts = broker_pacts.consumers()", "verification to - \" \"DEPRECATED, use --pact-verify-consumer instead\", ) group.addoption(", "for text, kw in self.pact_interaction.result.results_for_terminal(): out.line(text, **kw) if self.verbosity >", "pytest from _pytest.outcomes import Failed from _pytest.reports import TestReport from", "to the consumer version when pacts \" # \"are uploaded", "bold=True) for text, kw in self.pact_interaction.result.results_for_terminal(): out.line(text, **kw) if self.verbosity", "# help=\"generate consumer version from source 'git-tag' or 'git-hash'\") #", "from source 'git-tag' or 'git-hash'\") # group.addoption(\"--pact-consumer-version-tag\", metavar='TAG', action=\"append\", #", "publishing pacts to the broker\") # group.addoption(\"--pact-consumer-version-source\", default=None, # help=\"generate", "logging.basicConfig(format=\"%(message)s\") verbosity = 
config.getoption(\"verbose\") if verbosity > 0: log.setLevel(logging.DEBUG) class", "0: log.setLevel(logging.DEBUG) class PytestPactVerifier: def __init__(self, publish_results, provider_version, interaction, consumer):", "_ = identifier return str(interaction) def pytest_generate_tests(metafunc): if \"pact_verifier\" in", "to the broker; multiple tags may be supplied\") def get_broker_url(config):", "= consumer def verify(self, provider_url, provider_setup, extra_provider_headers={}): try: self.interaction.verify_with_callable_setup(provider_url, provider_setup,", "here report.verbosity = item.config.option.verbose return report def toterminal(self, out): out.line(\"Pact", "config.getoption(\"verbose\") if verbosity > 0: log.setLevel(logging.DEBUG) class PytestPactVerifier: def __init__(self,", "filter_consumer_name] metafunc.parametrize(\"pact_verifier\", flatten_pacts(pacts), ids=test_id, indirect=True) class PactTestReport(TestReport): \"\"\"Custom TestReport that", "toterminal(self, out): out.line(\"Pact failure details:\", bold=True) for text, kw in", "pacts matching any of these \" \"tags will be verified.\",", "or os.environ.get(\"PACT_PROVIDER_NAME\") # add the pact broker URL to the", "group.addoption( \"--pact-consumer-name\", default=None, help=\"consumer name to limit pact verification to", "help=\"consumer name to limit pact verification to - \" \"DEPRECATED,", "to the broker\") # group.addoption(\"--pact-consumer-version-source\", default=None, # help=\"generate consumer version", "metafunc.config.getoption(\"pact_consumer_name\") if filter_consumer_name: warnings.warn( \"The --pact-consumer-name command-line option is deprecated", "= get_broker_url(metafunc.config) if not broker_url: pact_files_location = metafunc.config.getoption(\"pact_files\") if not", "command-line option is deprecated \" \"and will be removed in", "return config.getoption(\"pact_broker_url\") or os.environ.get(\"PACT_BROKER_URL\") def get_provider_name(config): return 
config.getoption(\"pact_provider_name\") or os.environ.get(\"PACT_PROVIDER_NAME\")", "TestReport that allows us to attach an interaction to the", "raise Failed(str(e)) from None def finish(self): if self.consumer and self.publish_results", "if not hasattr(report, \"pact_interaction\"): return if hasattr(report, \"wasxfail\"): # wasxfail", "to limit pact verification to\" ) group.addoption( \"--pact-verify-consumer-tag\", metavar=\"TAG\", action=\"append\",", "\"f\" is a little clearer return \"ignore fail\", \"f\", \"IGNORE_FAIL\"", "and item.config.getoption(\"pact_allow_fail\"): # convert the fail into an \"expected\" fail,", "it's not *expected* to fail an \"f\" is a little", "config): if not hasattr(report, \"pact_interaction\"): return if hasattr(report, \"wasxfail\"): #", "DeprecationWarning, ) if filter_consumer_name: pacts = [pact for pact in", "default=None, help=\"consumer name to limit pact verification to\" ) group.addoption(", "self.consumer and self.publish_results and self.provider_version: self.consumer.publish_result(self.provider_version) def flatten_pacts(pacts): for consumer", "\"pact_verifier\" in metafunc.fixturenames: broker_url = get_broker_url(metafunc.config) if not broker_url: pact_files_location", "item.config.option.verbose return report def toterminal(self, out): out.line(\"Pact failure details:\", bold=True)", "> 0: location = get_broker_url(config) or config.getoption(\"pact_files\") return [f\"Loading pacts", "except (Failed, AssertionError) as e: raise Failed(str(e)) from None def", "details:\", bold=True) for text, kw in self.pact_interaction.result.results_for_terminal(): out.line(text, **kw) if", "interaction, consumer = request.param p = PytestPactVerifier( pytestconfig.getoption(\"pact_publish_results\"), pytestconfig.getoption(\"pact_provider_version\"), interaction,", ") group.addoption( \"--pact-allow-fail\", default=False, action=\"store_true\", help=\"do not fail the pytest", "help=\"provider version to use when reporting 
pact results to pact", "broker_pacts.consumers() filter_consumer_name = metafunc.config.getoption(\"pact_verify_consumer\") if not filter_consumer_name: filter_consumer_name = metafunc.config.getoption(\"pact_consumer_name\")", "from {location}\"] def pytest_configure(config): logging.getLogger(\"pactman\").handlers = [] logging.basicConfig(format=\"%(message)s\") verbosity =", "(pactman)\") group.addoption( \"--pact-files\", default=None, help=\"pact JSON files to verify (wildcards", "a thing. # group.addoption(\"--pact-publish-pacts\", action=\"store_true\", default=False, # help=\"publish pacts to", "request): interaction, consumer = request.param p = PytestPactVerifier( pytestconfig.getoption(\"pact_publish_results\"), pytestconfig.getoption(\"pact_provider_version\"),", "in the 3.0.0 release.\", DeprecationWarning, ) if filter_consumer_name: pacts =", "\"f\", \"IGNORE_FAIL\" @pytest.fixture() def pact_verifier(pytestconfig, request): interaction, consumer = request.param", "group.addoption( \"--pact-publish-results\", action=\"store_true\", default=False, help=\"report pact verification results to pact", "pact broker URL to the pytest output if running verbose", "ids=test_id, indirect=True ) else: provider_name = get_provider_name(metafunc.config) if not provider_name:", "class PytestPactVerifier: def __init__(self, publish_results, provider_version, interaction, consumer): self.publish_results =", "def pytest_generate_tests(metafunc): if \"pact_verifier\" in metafunc.fixturenames: broker_url = get_broker_url(metafunc.config) if", "log def pytest_addoption(parser): group = parser.getgroup(\"pact specific options (pactman)\") group.addoption(", "pact_files_location = metafunc.config.getoption(\"pact_files\") if not pact_files_location: raise ValueError(\"need a --pact-broker-url", "not pact_files_location: raise ValueError(\"need a --pact-broker-url or --pact-files option\") pact_files", "may be supplied\") def get_broker_url(config): return 
config.getoption(\"pact_broker_url\") or os.environ.get(\"PACT_BROKER_URL\") def", "else: yield (interaction, None) def load_pact_files(file_location): for filename in glob.glob(file_location,", "results to pact broker\", ) group.addoption( \"--pact-allow-fail\", default=False, action=\"store_true\", help=\"do", "def verify(self, provider_url, provider_setup, extra_provider_headers={}): try: self.interaction.verify_with_callable_setup(provider_url, provider_setup, extra_provider_headers) except", "these \" \"tags will be verified.\", ) group.addoption( \"--pact-publish-results\", action=\"store_true\",", "@classmethod def from_item_and_call(cls, item, call, interaction): report = super().from_item_and_call(item, call)", "consumer in pacts: last = consumer.interactions[-1] for interaction in consumer.interactions:", "we're reporting on a pact verification call interaction = item.funcargs[\"pact_verifier\"].interaction", "def load_pact_files(file_location): for filename in glob.glob(file_location, recursive=True): yield BrokerPact.load_file(filename, result_factory=PytestResult)", "return config.getoption(\"pact_provider_name\") or os.environ.get(\"PACT_PROVIDER_NAME\") # add the pact broker URL", "\"--pact-consumer-name\", default=None, help=\"consumer name to limit pact verification to -", "allows us to attach an interaction to the result, and", "call): if call.when != \"call\" or \"pact_verifier\" not in getattr(item,", "broker_url: pact_files_location = metafunc.config.getoption(\"pact_files\") if not pact_files_location: raise ValueError(\"need a", "broker pacts verified to those matching the tag. 
May be", "an interaction to the result, and then display the interaction's", "help=\"consumer version to use when publishing pacts to the broker\")", "broker URL\") group.addoption(\"--pact-broker-token\", default=\"\", help=\"pact broker bearer token\") group.addoption( \"--pact-provider-name\",", "import warnings import pytest from _pytest.outcomes import Failed from _pytest.reports", "to use when reporting pact results to pact broker\", )", "= PactBrokerConfig( broker_url, metafunc.config.getoption(\"pact_broker_token\"), metafunc.config.getoption(\"pact_verify_consumer_tag\", []), ) broker_pacts = BrokerPacts(", "action=\"append\", help=\"limit broker pacts verified to those matching the tag.", "interaction to the result, and then display the interaction's verification", "so we store it here report.verbosity = item.config.option.verbose return report", "URL to the pytest output if running verbose def pytest_report_header(config):", "option\") broker = PactBrokerConfig( broker_url, metafunc.config.getoption(\"pact_broker_token\"), metafunc.config.getoption(\"pact_verify_consumer_tag\", []), ) broker_pacts", "and self.publish_results and self.provider_version: self.consumer.publish_result(self.provider_version) def flatten_pacts(pacts): for consumer in", "out.line(text, **kw) if self.verbosity > 0: out.line(\"Traceback:\", bold=True) return super().toterminal(out)", "if pact.consumer == filter_consumer_name] metafunc.parametrize(\"pact_verifier\", flatten_pacts(pacts), ids=test_id, indirect=True) class PactTestReport(TestReport):", "import PytestResult, log def pytest_addoption(parser): group = parser.getgroup(\"pact specific options", "default=None, help=\"pact name of provider being verified\" ) group.addoption( \"--pact-consumer-name\",", "default=None, help=\"pact JSON files to verify (wildcards allowed)\" ) group.addoption(\"--pact-broker-url\",", "the tag. 
May be \" \"specified multiple times in which", "fail verification\", ) # Future options to be implemented. Listing", "= interaction self.consumer = consumer def verify(self, provider_url, provider_setup, extra_provider_headers={}):", "default=False, # help=\"publish pacts to pact broker\") # group.addoption(\"--pact-consumer-version\", default=None,", "self.verbosity > 0: out.line(\"Traceback:\", bold=True) return super().toterminal(out) else: out.line(\"Traceback not", "interaction = item.funcargs[\"pact_verifier\"].interaction report = PactTestReport.from_item_and_call(item, call, interaction) if report.failed", "verbosity > 0: log.setLevel(logging.DEBUG) class PytestPactVerifier: def __init__(self, publish_results, provider_version,", "fail the pytest run if any pacts fail verification\", )", ") group.addoption(\"--pact-broker-url\", default=\"\", help=\"pact broker URL\") group.addoption(\"--pact-broker-token\", default=\"\", help=\"pact broker", "verify (wildcards allowed)\" ) group.addoption(\"--pact-broker-url\", default=\"\", help=\"pact broker URL\") group.addoption(\"--pact-broker-token\",", "\"call\" or \"pact_verifier\" not in getattr(item, \"fixturenames\", []): return #", "action=\"store_true\", default=False, # help=\"publish pacts to pact broker\") # group.addoption(\"--pact-consumer-version\",", "provider_version, interaction, consumer): self.publish_results = publish_results self.provider_version = provider_version self.interaction", "-v to show it\") def pytest_runtest_makereport(item, call): if call.when !=", "consumer) else: yield (interaction, None) def load_pact_files(file_location): for filename in", "getattr(item, \"fixturenames\", []): return # use our custom TestReport subclass", "from_item_and_call(cls, item, call, interaction): report = super().from_item_and_call(item, call) report.pact_interaction =", "name to limit pact verification to - \" \"DEPRECATED, use", "self.interaction = interaction self.consumer = consumer def verify(self, 
provider_url, provider_setup,", "\"\"\"Custom TestReport that allows us to attach an interaction to", "\"pact_verifier\" not in getattr(item, \"fixturenames\", []): return # use our", "fail an \"f\" is a little clearer return \"ignore fail\",", "broker_url = get_broker_url(metafunc.config) if not broker_url: pact_files_location = metafunc.config.getoption(\"pact_files\") if", "interaction's verification result ouput as well as the traceback of", "load_pact_files(file_location): for filename in glob.glob(file_location, recursive=True): yield BrokerPact.load_file(filename, result_factory=PytestResult) def", "May be \" \"specified multiple times in which case pacts", "consumer version from source 'git-tag' or 'git-hash'\") # group.addoption(\"--pact-consumer-version-tag\", metavar='TAG',", "of these \" \"tags will be verified.\", ) group.addoption( \"--pact-publish-results\",", "import os import warnings import pytest from _pytest.outcomes import Failed", "provider_name = get_provider_name(metafunc.config) if not provider_name: raise ValueError(\"--pact-broker-url requires the", "= get_broker_url(config) or config.getoption(\"pact_files\") return [f\"Loading pacts from {location}\"] def", "last = consumer.interactions[-1] for interaction in consumer.interactions: if interaction is", "return # use our custom TestReport subclass if we're reporting", "call, interaction) if report.failed and item.config.getoption(\"pact_allow_fail\"): # convert the fail", "run if any pacts fail verification\", ) # Future options", "pact_verifier(pytestconfig, request): interaction, consumer = request.param p = PytestPactVerifier( pytestconfig.getoption(\"pact_publish_results\"),", "out.line(\"Traceback not shown, use pytest -v to show it\") def", "out.line(\"Traceback:\", bold=True) return super().toterminal(out) else: out.line(\"Traceback not shown, use pytest", "to fail an \"f\" is a little clearer return \"ignore", "warnings import pytest from _pytest.outcomes import Failed from 
_pytest.reports import", "__init__(self, publish_results, provider_version, interaction, consumer): self.publish_results = publish_results self.provider_version =", "def pytest_report_header(config): if config.getoption(\"verbose\") > 0: location = get_broker_url(config) or", "matching any of these \" \"tags will be verified.\", )", "for filename in glob.glob(file_location, recursive=True): yield BrokerPact.load_file(filename, result_factory=PytestResult) def test_id(identifier):", "\"wasxfail\"): # wasxfail usually displays an \"X\" but since it's", "pytest_generate_tests(metafunc): if \"pact_verifier\" in metafunc.fixturenames: broker_url = get_broker_url(metafunc.config) if not", "not broker_url: pact_files_location = metafunc.config.getoption(\"pact_files\") if not pact_files_location: raise ValueError(\"need", "0: out.line(\"Traceback:\", bold=True) return super().toterminal(out) else: out.line(\"Traceback not shown, use", "pact results to pact broker\", ) group.addoption( \"--pact-allow-fail\", default=False, action=\"store_true\",", "URL\") group.addoption(\"--pact-broker-token\", default=\"\", help=\"pact broker bearer token\") group.addoption( \"--pact-provider-name\", default=None,", "it\") def pytest_runtest_makereport(item, call): if call.when != \"call\" or \"pact_verifier\"", "pacts from {location}\"] def pytest_configure(config): logging.getLogger(\"pactman\").handlers = [] logging.basicConfig(format=\"%(message)s\") verbosity", "\"--pact-allow-fail\", default=False, action=\"store_true\", help=\"do not fail the pytest run if", "extra_provider_headers={}): try: self.interaction.verify_with_callable_setup(provider_url, provider_setup, extra_provider_headers) except (Failed, AssertionError) as e:", "PytestResult, log def pytest_addoption(parser): group = parser.getgroup(\"pact specific options (pactman)\")", "TestReport from .broker_pact import BrokerPact, BrokerPacts, PactBrokerConfig from .result import", "\"--pact-publish-results\", 
action=\"store_true\", default=False, help=\"report pact verification results to pact broker\",", "pact verification to - \" \"DEPRECATED, use --pact-verify-consumer instead\", )", "verification call interaction = item.funcargs[\"pact_verifier\"].interaction report = PactTestReport.from_item_and_call(item, call, interaction)", "us to attach an interaction to the result, and then", "if \"pact_verifier\" in metafunc.fixturenames: broker_url = get_broker_url(metafunc.config) if not broker_url:", "in pacts if pact.consumer == filter_consumer_name] metafunc.parametrize(\"pact_verifier\", flatten_pacts(pacts), ids=test_id, indirect=True)", "broker bearer token\") group.addoption( \"--pact-provider-name\", default=None, help=\"pact name of provider", "specific options (pactman)\") group.addoption( \"--pact-files\", default=None, help=\"pact JSON files to", "report.failed and item.config.getoption(\"pact_allow_fail\"): # convert the fail into an \"expected\"", "limit pact verification to - \" \"DEPRECATED, use --pact-verify-consumer instead\",", "name of provider being verified\" ) group.addoption( \"--pact-consumer-name\", default=None, help=\"consumer", "out.line(\"Pact failure details:\", bold=True) for text, kw in self.pact_interaction.result.results_for_terminal(): out.line(text,", "reporting pact results to pact broker\", ) group.addoption( \"--pact-allow-fail\", default=False,", "def pytest_report_teststatus(report, config): if not hasattr(report, \"pact_interaction\"): return if hasattr(report,", "if report.failed and item.config.getoption(\"pact_allow_fail\"): # convert the fail into an", "provider being verified\" ) group.addoption( \"--pact-consumer-name\", default=None, help=\"consumer name to", "failure. 
\"\"\" @classmethod def from_item_and_call(cls, item, call, interaction): report =", "supplied\") def get_broker_url(config): return config.getoption(\"pact_broker_url\") or os.environ.get(\"PACT_BROKER_URL\") def get_provider_name(config): return", "JSON files to verify (wildcards allowed)\" ) group.addoption(\"--pact-broker-url\", default=\"\", help=\"pact", ") # Future options to be implemented. Listing them here", "to those matching the tag. May be \" \"specified multiple", "requires the --pact-provider-name option\") broker = PactBrokerConfig( broker_url, metafunc.config.getoption(\"pact_broker_token\"), metafunc.config.getoption(\"pact_verify_consumer_tag\",", "\" \"and will be removed in the 3.0.0 release.\", DeprecationWarning,", "to verify (wildcards allowed)\" ) group.addoption(\"--pact-broker-url\", default=\"\", help=\"pact broker URL\")", "# group.addoption(\"--pact-publish-pacts\", action=\"store_true\", default=False, # help=\"publish pacts to pact broker\")", "allowed)\" ) group.addoption(\"--pact-broker-url\", default=\"\", help=\"pact broker URL\") group.addoption(\"--pact-broker-token\", default=\"\", help=\"pact", "applied to the consumer version when pacts \" # \"are", "\"\"\" @classmethod def from_item_and_call(cls, item, call, interaction): report = super().from_item_and_call(item,", "in getattr(item, \"fixturenames\", []): return # use our custom TestReport", "def get_broker_url(config): return config.getoption(\"pact_broker_url\") or os.environ.get(\"PACT_BROKER_URL\") def get_provider_name(config): return config.getoption(\"pact_provider_name\")", "consumer def verify(self, provider_url, provider_setup, extra_provider_headers={}): try: self.interaction.verify_with_callable_setup(provider_url, provider_setup, extra_provider_headers)", "def flatten_pacts(pacts): for consumer in pacts: last = consumer.interactions[-1] for", "(interaction, consumer) else: yield (interaction, None) def load_pact_files(file_location): for filename", "\"The 
--pact-consumer-name command-line option is deprecated \" \"and will be", "item, call, interaction): report = super().from_item_and_call(item, call) report.pact_interaction = interaction", "else: provider_name = get_provider_name(metafunc.config) if not provider_name: raise ValueError(\"--pact-broker-url requires", "toterminal() call can't reasonably get at this config, so we", "kw in self.pact_interaction.result.results_for_terminal(): out.line(text, **kw) if self.verbosity > 0: out.line(\"Traceback:\",", "files to verify (wildcards allowed)\" ) group.addoption(\"--pact-broker-url\", default=\"\", help=\"pact broker", ".broker_pact import BrokerPact, BrokerPacts, PactBrokerConfig from .result import PytestResult, log", "version when pacts \" # \"are uploaded to the broker;", "pytest run if any pacts fail verification\", ) # Future", "matching the tag. May be \" \"specified multiple times in", "item.funcargs[\"pact_verifier\"].interaction report = PactTestReport.from_item_and_call(item, call, interaction) if report.failed and item.config.getoption(\"pact_allow_fail\"):", "to be implemented. 
Listing them here so naming consistency can", "= request.param p = PytestPactVerifier( pytestconfig.getoption(\"pact_publish_results\"), pytestconfig.getoption(\"pact_provider_version\"), interaction, consumer, )", "metafunc.parametrize(\"pact_verifier\", flatten_pacts(pacts), ids=test_id, indirect=True) class PactTestReport(TestReport): \"\"\"Custom TestReport that allows", "group.addoption( \"--pact-allow-fail\", default=False, action=\"store_true\", help=\"do not fail the pytest run", "action=\"append\", # help=\"tag(s) that should be applied to the consumer", ") broker_pacts = BrokerPacts( provider_name, pact_broker=broker, result_factory=PytestResult ) pacts =", "self.consumer = consumer def verify(self, provider_url, provider_setup, extra_provider_headers={}): try: self.interaction.verify_with_callable_setup(provider_url,", "verbosity = config.getoption(\"verbose\") if verbosity > 0: log.setLevel(logging.DEBUG) class PytestPactVerifier:", "not provider_name: raise ValueError(\"--pact-broker-url requires the --pact-provider-name option\") broker =", "the consumer version when pacts \" # \"are uploaded to", "when publishing pacts to the broker\") # group.addoption(\"--pact-consumer-version-source\", default=None, #", "location = get_broker_url(config) or config.getoption(\"pact_files\") return [f\"Loading pacts from {location}\"]", "group.addoption(\"--pact-consumer-version-source\", default=None, # help=\"generate consumer version from source 'git-tag' or", "import pytest from _pytest.outcomes import Failed from _pytest.reports import TestReport", "= metafunc.config.getoption(\"pact_files\") if not pact_files_location: raise ValueError(\"need a --pact-broker-url or", "will be removed in the 3.0.0 release.\", DeprecationWarning, ) if", "get_broker_url(config): return config.getoption(\"pact_broker_url\") or os.environ.get(\"PACT_BROKER_URL\") def get_provider_name(config): return config.getoption(\"pact_provider_name\") or", "metafunc.parametrize( 
\"pact_verifier\", flatten_pacts(pact_files), ids=test_id, indirect=True ) else: provider_name = get_provider_name(metafunc.config)", "interaction): report = super().from_item_and_call(item, call) report.pact_interaction = interaction # the", "help=\"pact name of provider being verified\" ) group.addoption( \"--pact-consumer-name\", default=None,", "hasattr(report, \"pact_interaction\"): return if hasattr(report, \"wasxfail\"): # wasxfail usually displays", "an \"expected\" fail, which allows the run to pass report.wasxfail", "warnings.warn( \"The --pact-consumer-name command-line option is deprecated \" \"and will", "test_id(identifier): interaction, _ = identifier return str(interaction) def pytest_generate_tests(metafunc): if", "import logging import os import warnings import pytest from _pytest.outcomes", "reporting on a pact verification call interaction = item.funcargs[\"pact_verifier\"].interaction report", "implemented. Listing them here so naming consistency can be a", "--pact-files option\") pact_files = load_pact_files(pact_files_location) metafunc.parametrize( \"pact_verifier\", flatten_pacts(pact_files), ids=test_id, indirect=True", "call, interaction): report = super().from_item_and_call(item, call) report.pact_interaction = interaction #", "parser.getgroup(\"pact specific options (pactman)\") group.addoption( \"--pact-files\", default=None, help=\"pact JSON files", "or \"pact_verifier\" not in getattr(item, \"fixturenames\", []): return # use", "not *expected* to fail an \"f\" is a little clearer", "thing. # group.addoption(\"--pact-publish-pacts\", action=\"store_true\", default=False, # help=\"publish pacts to pact", "the toterminal() call can't reasonably get at this config, so", "be implemented. 
Listing them here so naming consistency can be", "provider_name, pact_broker=broker, result_factory=PytestResult ) pacts = broker_pacts.consumers() filter_consumer_name = metafunc.config.getoption(\"pact_verify_consumer\")", "[pact for pact in pacts if pact.consumer == filter_consumer_name] metafunc.parametrize(\"pact_verifier\",", "Listing them here so naming consistency can be a thing.", "yield (interaction, None) def load_pact_files(file_location): for filename in glob.glob(file_location, recursive=True):", "from .result import PytestResult, log def pytest_addoption(parser): group = parser.getgroup(\"pact", "ouput as well as the traceback of the failure. \"\"\"", "report = PactTestReport.from_item_and_call(item, call, interaction) if report.failed and item.config.getoption(\"pact_allow_fail\"): #", "verified.\", ) group.addoption( \"--pact-publish-results\", action=\"store_true\", default=False, help=\"report pact verification results", "\"DEPRECATED, use --pact-verify-consumer instead\", ) group.addoption( \"--pact-verify-consumer\", default=None, help=\"consumer name", "result ouput as well as the traceback of the failure.", "verify(self, provider_url, provider_setup, extra_provider_headers={}): try: self.interaction.verify_with_callable_setup(provider_url, provider_setup, extra_provider_headers) except (Failed,", "times in which case pacts matching any of these \"", "default=None, help=\"consumer name to limit pact verification to - \"", "(interaction, None) def load_pact_files(file_location): for filename in glob.glob(file_location, recursive=True): yield", "glob import logging import os import warnings import pytest from", "\"specified multiple times in which case pacts matching any of", "pytest_configure(config): logging.getLogger(\"pactman\").handlers = [] logging.basicConfig(format=\"%(message)s\") verbosity = config.getoption(\"verbose\") if verbosity", "self.publish_results and self.provider_version: self.consumer.publish_result(self.provider_version) 
def flatten_pacts(pacts): for consumer in pacts:", "TestReport subclass if we're reporting on a pact verification call", "display the interaction's verification result ouput as well as the", "since it's not *expected* to fail an \"f\" is a", "default=False, help=\"report pact verification results to pact broker\", ) group.addoption(", "BrokerPacts( provider_name, pact_broker=broker, result_factory=PytestResult ) pacts = broker_pacts.consumers() filter_consumer_name =", "get_broker_url(config) or config.getoption(\"pact_files\") return [f\"Loading pacts from {location}\"] def pytest_configure(config):", "3.0.0 release.\", DeprecationWarning, ) if filter_consumer_name: pacts = [pact for", "if call.when != \"call\" or \"pact_verifier\" not in getattr(item, \"fixturenames\",", "text, kw in self.pact_interaction.result.results_for_terminal(): out.line(text, **kw) if self.verbosity > 0:", "a little clearer return \"ignore fail\", \"f\", \"IGNORE_FAIL\" @pytest.fixture() def", "group.addoption(\"--pact-consumer-version\", default=None, # help=\"consumer version to use when publishing pacts", "if not broker_url: pact_files_location = metafunc.config.getoption(\"pact_files\") if not pact_files_location: raise", "if not provider_name: raise ValueError(\"--pact-broker-url requires the --pact-provider-name option\") broker", "def finish(self): if self.consumer and self.publish_results and self.provider_version: self.consumer.publish_result(self.provider_version) def", "displays an \"X\" but since it's not *expected* to fail", "def get_provider_name(config): return config.getoption(\"pact_provider_name\") or os.environ.get(\"PACT_PROVIDER_NAME\") # add the pact", "an \"X\" but since it's not *expected* to fail an", "def __init__(self, publish_results, provider_version, interaction, consumer): self.publish_results = publish_results self.provider_version", "publish_results self.provider_version = provider_version self.interaction = interaction self.consumer = consumer", 
"identifier return str(interaction) def pytest_generate_tests(metafunc): if \"pact_verifier\" in metafunc.fixturenames: broker_url", "config.getoption(\"pact_provider_name\") or os.environ.get(\"PACT_PROVIDER_NAME\") # add the pact broker URL to", "group.addoption(\"--pact-publish-pacts\", action=\"store_true\", default=False, # help=\"publish pacts to pact broker\") #", "in pacts: last = consumer.interactions[-1] for interaction in consumer.interactions: if", "use when reporting pact results to pact broker\", ) group.addoption(", "PactBrokerConfig( broker_url, metafunc.config.getoption(\"pact_broker_token\"), metafunc.config.getoption(\"pact_verify_consumer_tag\", []), ) broker_pacts = BrokerPacts( provider_name,", "filename in glob.glob(file_location, recursive=True): yield BrokerPact.load_file(filename, result_factory=PytestResult) def test_id(identifier): interaction,", "we store it here report.verbosity = item.config.option.verbose return report def", "case pacts matching any of these \" \"tags will be", "version to use when reporting pact results to pact broker\",", "group.addoption( \"--pact-files\", default=None, help=\"pact JSON files to verify (wildcards allowed)\"", "group.addoption( \"--pact-provider-name\", default=None, help=\"pact name of provider being verified\" )", "version from source 'git-tag' or 'git-hash'\") # group.addoption(\"--pact-consumer-version-tag\", metavar='TAG', action=\"append\",", "recursive=True): yield BrokerPact.load_file(filename, result_factory=PytestResult) def test_id(identifier): interaction, _ = identifier", "PactTestReport(TestReport): \"\"\"Custom TestReport that allows us to attach an interaction", "= PytestPactVerifier( pytestconfig.getoption(\"pact_publish_results\"), pytestconfig.getoption(\"pact_provider_version\"), interaction, consumer, ) yield p p.finish()", "interaction in consumer.interactions: if interaction is last: yield (interaction, consumer)", "pacts = broker_pacts.consumers() filter_consumer_name = 
metafunc.config.getoption(\"pact_verify_consumer\") if not filter_consumer_name: filter_consumer_name", "broker\") # group.addoption(\"--pact-consumer-version\", default=None, # help=\"consumer version to use when", "naming consistency can be a thing. # group.addoption(\"--pact-publish-pacts\", action=\"store_true\", default=False,", "consumer): self.publish_results = publish_results self.provider_version = provider_version self.interaction = interaction", "interaction, _ = identifier return str(interaction) def pytest_generate_tests(metafunc): if \"pact_verifier\"", "call) report.pact_interaction = interaction # the toterminal() call can't reasonably", "and self.provider_version: self.consumer.publish_result(self.provider_version) def flatten_pacts(pacts): for consumer in pacts: last", "allows the run to pass report.wasxfail = True report.outcome =", "not fail the pytest run if any pacts fail verification\",", "help=\"do not fail the pytest run if any pacts fail", "# help=\"publish pacts to pact broker\") # group.addoption(\"--pact-consumer-version\", default=None, #", "config.getoption(\"verbose\") > 0: location = get_broker_url(config) or config.getoption(\"pact_files\") return [f\"Loading", "in which case pacts matching any of these \" \"tags", "report = super().from_item_and_call(item, call) report.pact_interaction = interaction # the toterminal()", "output if running verbose def pytest_report_header(config): if config.getoption(\"verbose\") > 0:", "default=None, # help=\"generate consumer version from source 'git-tag' or 'git-hash'\")", "can be a thing. 
# group.addoption(\"--pact-publish-pacts\", action=\"store_true\", default=False, # help=\"publish", "= interaction # the toterminal() call can't reasonably get at", "on a pact verification call interaction = item.funcargs[\"pact_verifier\"].interaction report =", "raise ValueError(\"--pact-broker-url requires the --pact-provider-name option\") broker = PactBrokerConfig( broker_url,", "provider_setup, extra_provider_headers) except (Failed, AssertionError) as e: raise Failed(str(e)) from", "multiple times in which case pacts matching any of these", "\"--pact-provider-version\", default=None, help=\"provider version to use when reporting pact results", "convert the fail into an \"expected\" fail, which allows the", "be \" \"specified multiple times in which case pacts matching", "self.provider_version = provider_version self.interaction = interaction self.consumer = consumer def", ") group.addoption( \"--pact-consumer-name\", default=None, help=\"consumer name to limit pact verification", "for interaction in consumer.interactions: if interaction is last: yield (interaction,", "\"passed\" return report def pytest_report_teststatus(report, config): if not hasattr(report, \"pact_interaction\"):", "logging.getLogger(\"pactman\").handlers = [] logging.basicConfig(format=\"%(message)s\") verbosity = config.getoption(\"verbose\") if verbosity >", "for pact in pacts if pact.consumer == filter_consumer_name] metafunc.parametrize(\"pact_verifier\", flatten_pacts(pacts),", "group.addoption( \"--pact-verify-consumer-tag\", metavar=\"TAG\", action=\"append\", help=\"limit broker pacts verified to those", "to attach an interaction to the result, and then display", "def pytest_runtest_makereport(item, call): if call.when != \"call\" or \"pact_verifier\" not", "it here report.verbosity = item.config.option.verbose return report def toterminal(self, out):", "that allows us to attach an interaction to the result,", "little clearer return \"ignore fail\", \"f\", \"IGNORE_FAIL\" 
@pytest.fixture() def pact_verifier(pytestconfig,", "the broker\") # group.addoption(\"--pact-consumer-version-source\", default=None, # help=\"generate consumer version from", "can't reasonably get at this config, so we store it", "pacts \" # \"are uploaded to the broker; multiple tags", "@pytest.fixture() def pact_verifier(pytestconfig, request): interaction, consumer = request.param p =", "use --pact-verify-consumer instead\", ) group.addoption( \"--pact-verify-consumer\", default=None, help=\"consumer name to", "\"--pact-files\", default=None, help=\"pact JSON files to verify (wildcards allowed)\" )", "metafunc.config.getoption(\"pact_verify_consumer_tag\", []), ) broker_pacts = BrokerPacts( provider_name, pact_broker=broker, result_factory=PytestResult )", "[]), ) broker_pacts = BrokerPacts( provider_name, pact_broker=broker, result_factory=PytestResult ) pacts", "the pytest run if any pacts fail verification\", ) #", "the run to pass report.wasxfail = True report.outcome = \"passed\"", "= parser.getgroup(\"pact specific options (pactman)\") group.addoption( \"--pact-files\", default=None, help=\"pact JSON", "filter_consumer_name: warnings.warn( \"The --pact-consumer-name command-line option is deprecated \" \"and", "help=\"pact JSON files to verify (wildcards allowed)\" ) group.addoption(\"--pact-broker-url\", default=\"\",", "option\") pact_files = load_pact_files(pact_files_location) metafunc.parametrize( \"pact_verifier\", flatten_pacts(pact_files), ids=test_id, indirect=True )", "wasxfail usually displays an \"X\" but since it's not *expected*", "str(interaction) def pytest_generate_tests(metafunc): if \"pact_verifier\" in metafunc.fixturenames: broker_url = get_broker_url(metafunc.config)", "filter_consumer_name: filter_consumer_name = metafunc.config.getoption(\"pact_consumer_name\") if filter_consumer_name: warnings.warn( \"The --pact-consumer-name command-line", "add the pact broker URL to the pytest output if", "flatten_pacts(pact_files), 
ids=test_id, indirect=True ) else: provider_name = get_provider_name(metafunc.config) if not", "default=None, # help=\"consumer version to use when publishing pacts to", "{location}\"] def pytest_configure(config): logging.getLogger(\"pactman\").handlers = [] logging.basicConfig(format=\"%(message)s\") verbosity = config.getoption(\"verbose\")", "a pact verification call interaction = item.funcargs[\"pact_verifier\"].interaction report = PactTestReport.from_item_and_call(item,", "will be verified.\", ) group.addoption( \"--pact-publish-results\", action=\"store_true\", default=False, help=\"report pact", "release.\", DeprecationWarning, ) if filter_consumer_name: pacts = [pact for pact", "\"fixturenames\", []): return # use our custom TestReport subclass if", "then display the interaction's verification result ouput as well as", "action=\"store_true\", help=\"do not fail the pytest run if any pacts", "the pact broker URL to the pytest output if running", "verified\" ) group.addoption( \"--pact-consumer-name\", default=None, help=\"consumer name to limit pact", "name to limit pact verification to\" ) group.addoption( \"--pact-verify-consumer-tag\", metavar=\"TAG\",", "filter_consumer_name = metafunc.config.getoption(\"pact_verify_consumer\") if not filter_consumer_name: filter_consumer_name = metafunc.config.getoption(\"pact_consumer_name\") if", "pact_files = load_pact_files(pact_files_location) metafunc.parametrize( \"pact_verifier\", flatten_pacts(pact_files), ids=test_id, indirect=True ) else:", "= super().from_item_and_call(item, call) report.pact_interaction = interaction # the toterminal() call", "= consumer.interactions[-1] for interaction in consumer.interactions: if interaction is last:", "# convert the fail into an \"expected\" fail, which allows", "pacts fail verification\", ) # Future options to be implemented.", "\"X\" but since it's not *expected* to fail an \"f\"", "raise ValueError(\"need a --pact-broker-url or --pact-files option\") pact_files = 
load_pact_files(pact_files_location)", "help=\"generate consumer version from source 'git-tag' or 'git-hash'\") # group.addoption(\"--pact-consumer-version-tag\",", "be supplied\") def get_broker_url(config): return config.getoption(\"pact_broker_url\") or os.environ.get(\"PACT_BROKER_URL\") def get_provider_name(config):", "be applied to the consumer version when pacts \" #", "# the toterminal() call can't reasonably get at this config,", "'git-hash'\") # group.addoption(\"--pact-consumer-version-tag\", metavar='TAG', action=\"append\", # help=\"tag(s) that should be", "provider_setup, extra_provider_headers={}): try: self.interaction.verify_with_callable_setup(provider_url, provider_setup, extra_provider_headers) except (Failed, AssertionError) as", "try: self.interaction.verify_with_callable_setup(provider_url, provider_setup, extra_provider_headers) except (Failed, AssertionError) as e: raise", "here so naming consistency can be a thing. # group.addoption(\"--pact-publish-pacts\",", "run to pass report.wasxfail = True report.outcome = \"passed\" return", "ValueError(\"--pact-broker-url requires the --pact-provider-name option\") broker = PactBrokerConfig( broker_url, metafunc.config.getoption(\"pact_broker_token\"),", "pacts verified to those matching the tag. 
May be \"", "if any pacts fail verification\", ) # Future options to", "load_pact_files(pact_files_location) metafunc.parametrize( \"pact_verifier\", flatten_pacts(pact_files), ids=test_id, indirect=True ) else: provider_name =", "= [] logging.basicConfig(format=\"%(message)s\") verbosity = config.getoption(\"verbose\") if verbosity > 0:", "get at this config, so we store it here report.verbosity", "result, and then display the interaction's verification result ouput as", "out): out.line(\"Pact failure details:\", bold=True) for text, kw in self.pact_interaction.result.results_for_terminal():", "verification results to pact broker\", ) group.addoption( \"--pact-provider-version\", default=None, help=\"provider", "a --pact-broker-url or --pact-files option\") pact_files = load_pact_files(pact_files_location) metafunc.parametrize( \"pact_verifier\",", "subclass if we're reporting on a pact verification call interaction", "provider_name: raise ValueError(\"--pact-broker-url requires the --pact-provider-name option\") broker = PactBrokerConfig(", "options to be implemented. 
Listing them here so naming consistency", "def test_id(identifier): interaction, _ = identifier return str(interaction) def pytest_generate_tests(metafunc):", "instead\", ) group.addoption( \"--pact-verify-consumer\", default=None, help=\"consumer name to limit pact", "return report def pytest_report_teststatus(report, config): if not hasattr(report, \"pact_interaction\"): return", "be verified.\", ) group.addoption( \"--pact-publish-results\", action=\"store_true\", default=False, help=\"report pact verification", "verification to\" ) group.addoption( \"--pact-verify-consumer-tag\", metavar=\"TAG\", action=\"append\", help=\"limit broker pacts", "True report.outcome = \"passed\" return report def pytest_report_teststatus(report, config): if", "any pacts fail verification\", ) # Future options to be", "\"pact_interaction\"): return if hasattr(report, \"wasxfail\"): # wasxfail usually displays an", "not shown, use pytest -v to show it\") def pytest_runtest_makereport(item,", "> 0: log.setLevel(logging.DEBUG) class PytestPactVerifier: def __init__(self, publish_results, provider_version, interaction,", ") group.addoption( \"--pact-publish-results\", action=\"store_true\", default=False, help=\"report pact verification results to", "version to use when publishing pacts to the broker\") #", "= metafunc.config.getoption(\"pact_consumer_name\") if filter_consumer_name: warnings.warn( \"The --pact-consumer-name command-line option is", "def from_item_and_call(cls, item, call, interaction): report = super().from_item_and_call(item, call) report.pact_interaction", "os import warnings import pytest from _pytest.outcomes import Failed from", "\"--pact-verify-consumer-tag\", metavar=\"TAG\", action=\"append\", help=\"limit broker pacts verified to those matching", "limit pact verification to\" ) group.addoption( \"--pact-verify-consumer-tag\", metavar=\"TAG\", action=\"append\", help=\"limit", "BrokerPact.load_file(filename, result_factory=PytestResult) def 
test_id(identifier): interaction, _ = identifier return str(interaction)", "**kw) if self.verbosity > 0: out.line(\"Traceback:\", bold=True) return super().toterminal(out) else:", "broker; multiple tags may be supplied\") def get_broker_url(config): return config.getoption(\"pact_broker_url\")", "so naming consistency can be a thing. # group.addoption(\"--pact-publish-pacts\", action=\"store_true\",", "# group.addoption(\"--pact-consumer-version\", default=None, # help=\"consumer version to use when publishing", "broker = PactBrokerConfig( broker_url, metafunc.config.getoption(\"pact_broker_token\"), metafunc.config.getoption(\"pact_verify_consumer_tag\", []), ) broker_pacts =", "# help=\"consumer version to use when publishing pacts to the", "# add the pact broker URL to the pytest output", "to show it\") def pytest_runtest_makereport(item, call): if call.when != \"call\"", "in consumer.interactions: if interaction is last: yield (interaction, consumer) else:", "group.addoption(\"--pact-broker-url\", default=\"\", help=\"pact broker URL\") group.addoption(\"--pact-broker-token\", default=\"\", help=\"pact broker bearer", ") if filter_consumer_name: pacts = [pact for pact in pacts", "pacts to pact broker\") # group.addoption(\"--pact-consumer-version\", default=None, # help=\"consumer version", "indirect=True ) else: provider_name = get_provider_name(metafunc.config) if not provider_name: raise", "which case pacts matching any of these \" \"tags will", "broker URL to the pytest output if running verbose def", "pact_broker=broker, result_factory=PytestResult ) pacts = broker_pacts.consumers() filter_consumer_name = metafunc.config.getoption(\"pact_verify_consumer\") if", "be a thing. 
# group.addoption(\"--pact-publish-pacts\", action=\"store_true\", default=False, # help=\"publish pacts", "AssertionError) as e: raise Failed(str(e)) from None def finish(self): if", "for consumer in pacts: last = consumer.interactions[-1] for interaction in", "the pytest output if running verbose def pytest_report_header(config): if config.getoption(\"verbose\")", "yield (interaction, consumer) else: yield (interaction, None) def load_pact_files(file_location): for", "tags may be supplied\") def get_broker_url(config): return config.getoption(\"pact_broker_url\") or os.environ.get(\"PACT_BROKER_URL\")", "interaction is last: yield (interaction, consumer) else: yield (interaction, None)", "return if hasattr(report, \"wasxfail\"): # wasxfail usually displays an \"X\"", "[f\"Loading pacts from {location}\"] def pytest_configure(config): logging.getLogger(\"pactman\").handlers = [] logging.basicConfig(format=\"%(message)s\")", "\" # \"are uploaded to the broker; multiple tags may", "get_provider_name(config): return config.getoption(\"pact_provider_name\") or os.environ.get(\"PACT_PROVIDER_NAME\") # add the pact broker", "pact broker\", ) group.addoption( \"--pact-provider-version\", default=None, help=\"provider version to use", "_pytest.outcomes import Failed from _pytest.reports import TestReport from .broker_pact import", "them here so naming consistency can be a thing. 
#", "config.getoption(\"pact_broker_url\") or os.environ.get(\"PACT_BROKER_URL\") def get_provider_name(config): return config.getoption(\"pact_provider_name\") or os.environ.get(\"PACT_PROVIDER_NAME\") #", ") group.addoption( \"--pact-provider-version\", default=None, help=\"provider version to use when reporting", "finish(self): if self.consumer and self.publish_results and self.provider_version: self.consumer.publish_result(self.provider_version) def flatten_pacts(pacts):", "- \" \"DEPRECATED, use --pact-verify-consumer instead\", ) group.addoption( \"--pact-verify-consumer\", default=None,", "attach an interaction to the result, and then display the", "# use our custom TestReport subclass if we're reporting on", "options (pactman)\") group.addoption( \"--pact-files\", default=None, help=\"pact JSON files to verify", "to use when publishing pacts to the broker\") # group.addoption(\"--pact-consumer-version-source\",", "report def toterminal(self, out): out.line(\"Pact failure details:\", bold=True) for text,", "return report def toterminal(self, out): out.line(\"Pact failure details:\", bold=True) for", "*expected* to fail an \"f\" is a little clearer return", "broker\", ) group.addoption( \"--pact-allow-fail\", default=False, action=\"store_true\", help=\"do not fail the", "pacts: last = consumer.interactions[-1] for interaction in consumer.interactions: if interaction", "filter_consumer_name = metafunc.config.getoption(\"pact_consumer_name\") if filter_consumer_name: warnings.warn( \"The --pact-consumer-name command-line option", "to the result, and then display the interaction's verification result", "report.wasxfail = True report.outcome = \"passed\" return report def pytest_report_teststatus(report,", "def toterminal(self, out): out.line(\"Pact failure details:\", bold=True) for text, kw", "help=\"limit broker pacts verified to those matching the tag. 
May", "= config.getoption(\"verbose\") if verbosity > 0: log.setLevel(logging.DEBUG) class PytestPactVerifier: def", "# group.addoption(\"--pact-consumer-version-tag\", metavar='TAG', action=\"append\", # help=\"tag(s) that should be applied", "log.setLevel(logging.DEBUG) class PytestPactVerifier: def __init__(self, publish_results, provider_version, interaction, consumer): self.publish_results", "0: location = get_broker_url(config) or config.getoption(\"pact_files\") return [f\"Loading pacts from", "deprecated \" \"and will be removed in the 3.0.0 release.\",", "as the traceback of the failure. \"\"\" @classmethod def from_item_and_call(cls,", "the traceback of the failure. \"\"\" @classmethod def from_item_and_call(cls, item,", "to pass report.wasxfail = True report.outcome = \"passed\" return report", "an \"f\" is a little clearer return \"ignore fail\", \"f\",", "interaction self.consumer = consumer def verify(self, provider_url, provider_setup, extra_provider_headers={}): try:", "self.provider_version: self.consumer.publish_result(self.provider_version) def flatten_pacts(pacts): for consumer in pacts: last =", "Failed(str(e)) from None def finish(self): if self.consumer and self.publish_results and", "pact_files_location: raise ValueError(\"need a --pact-broker-url or --pact-files option\") pact_files =", "is deprecated \" \"and will be removed in the 3.0.0", "os.environ.get(\"PACT_PROVIDER_NAME\") # add the pact broker URL to the pytest", "pact in pacts if pact.consumer == filter_consumer_name] metafunc.parametrize(\"pact_verifier\", flatten_pacts(pacts), ids=test_id,", "# \"are uploaded to the broker; multiple tags may be", "or --pact-files option\") pact_files = load_pact_files(pact_files_location) metafunc.parametrize( \"pact_verifier\", flatten_pacts(pact_files), ids=test_id,", "in self.pact_interaction.result.results_for_terminal(): out.line(text, **kw) if self.verbosity > 0: out.line(\"Traceback:\", bold=True)", "import Failed from _pytest.reports 
import TestReport from .broker_pact import BrokerPact,", "return str(interaction) def pytest_generate_tests(metafunc): if \"pact_verifier\" in metafunc.fixturenames: broker_url =", "broker\", ) group.addoption( \"--pact-provider-version\", default=None, help=\"provider version to use when", "fail\", \"f\", \"IGNORE_FAIL\" @pytest.fixture() def pact_verifier(pytestconfig, request): interaction, consumer =", "which allows the run to pass report.wasxfail = True report.outcome", "= identifier return str(interaction) def pytest_generate_tests(metafunc): if \"pact_verifier\" in metafunc.fixturenames:", "BrokerPacts, PactBrokerConfig from .result import PytestResult, log def pytest_addoption(parser): group", "from _pytest.outcomes import Failed from _pytest.reports import TestReport from .broker_pact", "verification\", ) # Future options to be implemented. Listing them", "call interaction = item.funcargs[\"pact_verifier\"].interaction report = PactTestReport.from_item_and_call(item, call, interaction) if", "any of these \" \"tags will be verified.\", ) group.addoption(", "result_factory=PytestResult ) pacts = broker_pacts.consumers() filter_consumer_name = metafunc.config.getoption(\"pact_verify_consumer\") if not", "pact broker\") # group.addoption(\"--pact-consumer-version\", default=None, # help=\"consumer version to use", "if we're reporting on a pact verification call interaction =", "to pact broker\", ) group.addoption( \"--pact-provider-version\", default=None, help=\"provider version to", "not filter_consumer_name: filter_consumer_name = metafunc.config.getoption(\"pact_consumer_name\") if filter_consumer_name: warnings.warn( \"The --pact-consumer-name", "config, so we store it here report.verbosity = item.config.option.verbose return", "being verified\" ) group.addoption( \"--pact-consumer-name\", default=None, help=\"consumer name to limit", "filter_consumer_name: pacts = [pact for pact in pacts if pact.consumer", "= load_pact_files(pact_files_location) 
metafunc.parametrize( \"pact_verifier\", flatten_pacts(pact_files), ids=test_id, indirect=True ) else: provider_name", "the broker; multiple tags may be supplied\") def get_broker_url(config): return" ]
[ "from the main class, it returns the value of `class_doc`,", "def __get__(self, obj, type=None): if obj is None: return self.class_doc", "value of `class_doc`, *not* the property itself. This is necessary", "\"\"\" def __init__(self, class_doc, fget): self.class_doc = class_doc self.fget =", "self.class_doc else: return self.fget(obj) def __set__(self, obj, value): raise AttributeError(\"can't", "def __init__(self, x): ... self.x = x ... @docstring_property(__doc__) ...", "of `class_doc`, *not* the property itself. This is necessary so", "of x is %s.\" % self.x >>> A.__doc__ 'Main docstring'", "\"\"\" def wrapper(fget): return DocstringProperty(class_doc, fget) return wrapper class DocstringProperty(object):", "the main class, it returns the value of `class_doc`, *not*", "* When the attribute is accessed from the main class,", "% self.x >>> A.__doc__ 'Main docstring' >>> a = A(10)", "`class_doc`, *not* the property itself. This is necessary so Sphinx", "is %s.\" % self.x >>> A.__doc__ 'Main docstring' >>> a", "def __doc__(self): ... return \"My value of x is %s.\"", "self.class_doc = class_doc self.fget = fget def __get__(self, obj, type=None):", "main class, it returns the value of `class_doc`, *not* the", "a = A(10) >>> a.__doc__ 'My value of x is", "getting the attribute; setting and deleting raise an `AttributeError`. \"\"\"", "fget): self.class_doc = class_doc self.fget = fget def __get__(self, obj,", "return self.class_doc else: return self.fget(obj) def __set__(self, obj, value): raise", "following two ways: * When the attribute is accessed from", "obj, type=None): if obj is None: return self.class_doc else: return", "setting and deleting raise an `AttributeError`. \"\"\" def __init__(self, class_doc,", "raise AttributeError(\"can't set attribute\") def __delete__(self, obj): raise AttributeError(\"can't delete", "in the following two ways: * When the attribute is", "10.' 
\"\"\" def wrapper(fget): return DocstringProperty(class_doc, fget) return wrapper class", "an `AttributeError`. \"\"\" def __init__(self, class_doc, fget): self.class_doc = class_doc", "\"My value of x is %s.\" % self.x >>> A.__doc__", ">>> a = A(10) >>> a.__doc__ 'My value of x", "the property itself. This is necessary so Sphinx and other", "class, it returns the value of `class_doc`, *not* the property", "a.__doc__ 'My value of x is 10.' \"\"\" def wrapper(fget):", "can access the class docstring. * Only supports getting the", "docstring. * Only supports getting the attribute; setting and deleting", "x ... @docstring_property(__doc__) ... def __doc__(self): ... return \"My value", "from: https://gist.github.com/bfroehle/4041015 >>> class A(object): ... '''Main docstring''' ... def", "two ways: * When the attribute is accessed from the", "def wrapper(fget): return DocstringProperty(class_doc, fget) return wrapper class DocstringProperty(object): \"\"\"Property", "value): raise AttributeError(\"can't set attribute\") def __delete__(self, obj): raise AttributeError(\"can't", "than `property` in the following two ways: * When the", "property itself. This is necessary so Sphinx and other documentation", "* Only supports getting the attribute; setting and deleting raise", "%s.\" % self.x >>> A.__doc__ 'Main docstring' >>> a =", "docstring' >>> a = A(10) >>> a.__doc__ 'My value of", "x): ... self.x = x ... @docstring_property(__doc__) ... def __doc__(self):", "necessary so Sphinx and other documentation tools can access the", "= class_doc self.fget = fget def __get__(self, obj, type=None): if", "return wrapper class DocstringProperty(object): \"\"\"Property for the `__doc__` attribute. Different", "A(object): ... '''Main docstring''' ... def __init__(self, x): ... self.x", "\"\"\"Property for the `__doc__` attribute. Different than `property` in the", "= fget def __get__(self, obj, type=None): if obj is None:", "... 
return \"My value of x is %s.\" % self.x", "__doc__(self): ... return \"My value of x is %s.\" %", "supports getting the attribute; setting and deleting raise an `AttributeError`.", "__init__(self, class_doc, fget): self.class_doc = class_doc self.fget = fget def", "is None: return self.class_doc else: return self.fget(obj) def __set__(self, obj,", "other documentation tools can access the class docstring. * Only", "docstring_property(class_doc): \"\"\"Property attribute for docstrings. Took from: https://gist.github.com/bfroehle/4041015 >>> class", "`AttributeError`. \"\"\" def __init__(self, class_doc, fget): self.class_doc = class_doc self.fget", "__get__(self, obj, type=None): if obj is None: return self.class_doc else:", "obj is None: return self.class_doc else: return self.fget(obj) def __set__(self,", ">>> a.__doc__ 'My value of x is 10.' \"\"\" def", "`property` in the following two ways: * When the attribute", "Different than `property` in the following two ways: * When", "def docstring_property(class_doc): \"\"\"Property attribute for docstrings. Took from: https://gist.github.com/bfroehle/4041015 >>>", "class A(object): ... '''Main docstring''' ... def __init__(self, x): ...", "coding: utf-8 -*- def docstring_property(class_doc): \"\"\"Property attribute for docstrings. Took", "'''Main docstring''' ... def __init__(self, x): ... self.x = x", "else: return self.fget(obj) def __set__(self, obj, value): raise AttributeError(\"can't set", "and other documentation tools can access the class docstring. *", "... @docstring_property(__doc__) ... def __doc__(self): ... return \"My value of", "Only supports getting the attribute; setting and deleting raise an", "return self.fget(obj) def __set__(self, obj, value): raise AttributeError(\"can't set attribute\")", "return DocstringProperty(class_doc, fget) return wrapper class DocstringProperty(object): \"\"\"Property for the", "and deleting raise an `AttributeError`. 
\"\"\" def __init__(self, class_doc, fget):", "the `__doc__` attribute. Different than `property` in the following two", "type=None): if obj is None: return self.class_doc else: return self.fget(obj)", "wrapper(fget): return DocstringProperty(class_doc, fget) return wrapper class DocstringProperty(object): \"\"\"Property for", "so Sphinx and other documentation tools can access the class", "class_doc, fget): self.class_doc = class_doc self.fget = fget def __get__(self,", "self.x = x ... @docstring_property(__doc__) ... def __doc__(self): ... return", ">>> A.__doc__ 'Main docstring' >>> a = A(10) >>> a.__doc__", "if obj is None: return self.class_doc else: return self.fget(obj) def", "= x ... @docstring_property(__doc__) ... def __doc__(self): ... return \"My", "ways: * When the attribute is accessed from the main", "docstrings. Took from: https://gist.github.com/bfroehle/4041015 >>> class A(object): ... '''Main docstring'''", "A.__doc__ 'Main docstring' >>> a = A(10) >>> a.__doc__ 'My", "When the attribute is accessed from the main class, it", "the value of `class_doc`, *not* the property itself. This is", "value of x is %s.\" % self.x >>> A.__doc__ 'Main", "Sphinx and other documentation tools can access the class docstring.", "def __init__(self, class_doc, fget): self.class_doc = class_doc self.fget = fget", "fget def __get__(self, obj, type=None): if obj is None: return", "for the `__doc__` attribute. Different than `property` in the following", "Took from: https://gist.github.com/bfroehle/4041015 >>> class A(object): ... '''Main docstring''' ...", "'Main docstring' >>> a = A(10) >>> a.__doc__ 'My value", "is accessed from the main class, it returns the value", "the attribute is accessed from the main class, it returns", "-*- def docstring_property(class_doc): \"\"\"Property attribute for docstrings. Took from: https://gist.github.com/bfroehle/4041015", "itself. 
This is necessary so Sphinx and other documentation tools", "documentation tools can access the class docstring. * Only supports", "def __set__(self, obj, value): raise AttributeError(\"can't set attribute\") def __delete__(self,", "attribute is accessed from the main class, it returns the", "class DocstringProperty(object): \"\"\"Property for the `__doc__` attribute. Different than `property`", "This is necessary so Sphinx and other documentation tools can", "DocstringProperty(object): \"\"\"Property for the `__doc__` attribute. Different than `property` in", "__init__(self, x): ... self.x = x ... @docstring_property(__doc__) ... def", "the class docstring. * Only supports getting the attribute; setting", "@docstring_property(__doc__) ... def __doc__(self): ... return \"My value of x", "return \"My value of x is %s.\" % self.x >>>", "class docstring. * Only supports getting the attribute; setting and", "tools can access the class docstring. * Only supports getting", "https://gist.github.com/bfroehle/4041015 >>> class A(object): ... '''Main docstring''' ... def __init__(self,", "deleting raise an `AttributeError`. \"\"\" def __init__(self, class_doc, fget): self.class_doc", "__set__(self, obj, value): raise AttributeError(\"can't set attribute\") def __delete__(self, obj):", "for docstrings. Took from: https://gist.github.com/bfroehle/4041015 >>> class A(object): ... '''Main", "= A(10) >>> a.__doc__ 'My value of x is 10.'", "the following two ways: * When the attribute is accessed", "obj, value): raise AttributeError(\"can't set attribute\") def __delete__(self, obj): raise", "returns the value of `class_doc`, *not* the property itself. This", "... def __doc__(self): ... return \"My value of x is", "'My value of x is 10.' \"\"\" def wrapper(fget): return", "x is 10.' \"\"\" def wrapper(fget): return DocstringProperty(class_doc, fget) return", "class_doc self.fget = fget def __get__(self, obj, type=None): if obj", "is 10.' 
\"\"\" def wrapper(fget): return DocstringProperty(class_doc, fget) return wrapper", "attribute; setting and deleting raise an `AttributeError`. \"\"\" def __init__(self,", "... def __init__(self, x): ... self.x = x ... @docstring_property(__doc__)", "attribute. Different than `property` in the following two ways: *", "self.fget = fget def __get__(self, obj, type=None): if obj is", "raise an `AttributeError`. \"\"\" def __init__(self, class_doc, fget): self.class_doc =", "it returns the value of `class_doc`, *not* the property itself.", "utf-8 -*- def docstring_property(class_doc): \"\"\"Property attribute for docstrings. Took from:", "fget) return wrapper class DocstringProperty(object): \"\"\"Property for the `__doc__` attribute.", "AttributeError(\"can't set attribute\") def __delete__(self, obj): raise AttributeError(\"can't delete attribute\")", "accessed from the main class, it returns the value of", "-*- coding: utf-8 -*- def docstring_property(class_doc): \"\"\"Property attribute for docstrings.", "attribute for docstrings. Took from: https://gist.github.com/bfroehle/4041015 >>> class A(object): ...", "wrapper class DocstringProperty(object): \"\"\"Property for the `__doc__` attribute. Different than", ">>> class A(object): ... '''Main docstring''' ... def __init__(self, x):", "value of x is 10.' \"\"\" def wrapper(fget): return DocstringProperty(class_doc,", "... '''Main docstring''' ... def __init__(self, x): ... self.x =", "access the class docstring. * Only supports getting the attribute;", "x is %s.\" % self.x >>> A.__doc__ 'Main docstring' >>>", "of x is 10.' \"\"\" def wrapper(fget): return DocstringProperty(class_doc, fget)", "is necessary so Sphinx and other documentation tools can access", "self.fget(obj) def __set__(self, obj, value): raise AttributeError(\"can't set attribute\") def", "# -*- coding: utf-8 -*- def docstring_property(class_doc): \"\"\"Property attribute for", "the attribute; setting and deleting raise an `AttributeError`. 
\"\"\" def", "docstring''' ... def __init__(self, x): ... self.x = x ...", "DocstringProperty(class_doc, fget) return wrapper class DocstringProperty(object): \"\"\"Property for the `__doc__`", "\"\"\"Property attribute for docstrings. Took from: https://gist.github.com/bfroehle/4041015 >>> class A(object):", "A(10) >>> a.__doc__ 'My value of x is 10.' \"\"\"", "... self.x = x ... @docstring_property(__doc__) ... def __doc__(self): ...", "`__doc__` attribute. Different than `property` in the following two ways:", "None: return self.class_doc else: return self.fget(obj) def __set__(self, obj, value):", "self.x >>> A.__doc__ 'Main docstring' >>> a = A(10) >>>", "*not* the property itself. This is necessary so Sphinx and" ]
[ "'source_idx', 'destination_image', 'destination_idx', 'score', 'ambiguity'] df.source_idx = df.source_idx.map(source_map) df.destination_idx =", "None import numpy as np import pandas as pd def", "destin_map = {k:v for k, v in enumerate(destin_kps.index)} s_siftdata =", "bidx=None, **kwargs): \"\"\" Apply a composite CUDA matcher and ratio", "df.source_idx = df.source_idx.map(source_map) df.destination_idx = df.destination_idx.map(destin_map) # Set the matches", "are reindexed 0-n, but need to be remapped to the", "= cs.PySiftData.from_data_frame(source_kps, source_des) d_siftdata = cs.PySiftData.from_data_frame(destin_kps, destin_des) cs.PyMatchSiftData(s_siftdata, d_siftdata) matches,", "cs = None import numpy as np import pandas as", "as an ambiguity value. In testing symmetry is not required", "it is expensive without significant gain in accuracy when using", "np import pandas as pd def match(edge, aidx=None, bidx=None, **kwargs):", "a composite CUDA matcher and ratio check. If this method", "check. If this method is used, no additional ratio check", "cs.PyMatchSiftData(s_siftdata, d_siftdata) matches, _ = s_siftdata.to_data_frame() # Matches are reindexed", "= edge.source['node_id'] destination = np.empty(len(matches)) destination[:] = edge.destination['node_id'] df =", "v in enumerate(source_kps.index)} destin_kps = edge.destination.get_keypoints(index=bidx) destin_des = edge.destination.descriptors[bidx] destin_map", "= np.empty(len(matches)) source[:] = edge.source['node_id'] destination = np.empty(len(matches)) destination[:] =", "to the source_kps, # destin_kps indices. This is the mismatch)", "remapped to the source_kps, # destin_kps indices. 
This is the", "import warnings try: import cudasift as cs except: cs =", "in enumerate(destin_kps.index)} s_siftdata = cs.PySiftData.from_data_frame(source_kps, source_des) d_siftdata = cs.PySiftData.from_data_frame(destin_kps, destin_des)", "import cudasift as cs except: cs = None import numpy", "= edge.destination.get_keypoints(index=bidx) destin_des = edge.destination.descriptors[bidx] destin_map = {k:v for k,", "destin_des = edge.destination.descriptors[bidx] destin_map = {k:v for k, v in", "testing symmetry is not required as it is expensive without", "**kwargs): \"\"\" Apply a composite CUDA matcher and ratio check.", "reindexed 0-n, but need to be remapped to the source_kps,", "no symmetry check is required. The ratio check is embedded", "indices. This is the mismatch) source = np.empty(len(matches)) source[:] =", "the source_kps, # destin_kps indices. This is the mismatch) source", "np.empty(len(matches)) source[:] = edge.source['node_id'] destination = np.empty(len(matches)) destination[:] = edge.destination['node_id']", "Set the matches and set the 'ratio' (ambiguity) mask edge.matches", "as it is expensive without significant gain in accuracy when", "CUDA matcher and ratio check. If this method is used,", "is not required as it is expensive without significant gain", "warnings try: import cudasift as cs except: cs = None", "try: import cudasift as cs except: cs = None import", "= {k:v for k, v in enumerate(source_kps.index)} destin_kps = edge.destination.get_keypoints(index=bidx)", "pandas as pd def match(edge, aidx=None, bidx=None, **kwargs): \"\"\" Apply", "an ambiguity value. 
In testing symmetry is not required as", "pd.Series(matches.index), pd.Series(destination), matches.match, matches.score, matches.ambiguity], axis=1) df.columns = ['source_image', 'source_idx',", "as cs except: cs = None import numpy as np", "d_siftdata = cs.PySiftData.from_data_frame(destin_kps, destin_des) cs.PyMatchSiftData(s_siftdata, d_siftdata) matches, _ = s_siftdata.to_data_frame()", "is required. The ratio check is embedded on the cuda", "matches.match, matches.score, matches.ambiguity], axis=1) df.columns = ['source_image', 'source_idx', 'destination_image', 'destination_idx',", "destin_kps = edge.destination.get_keypoints(index=bidx) destin_des = edge.destination.descriptors[bidx] destin_map = {k:v for", "= edge.source.get_keypoints(index=aidx) source_des = edge.source.descriptors[aidx] source_map = {k:v for k,", "= s_siftdata.to_data_frame() # Matches are reindexed 0-n, but need to", "and returned as an ambiguity value. In testing symmetry is", "= np.empty(len(matches)) destination[:] = edge.destination['node_id'] df = pd.concat([pd.Series(source), pd.Series(matches.index), pd.Series(destination),", "= {k:v for k, v in enumerate(destin_kps.index)} s_siftdata = cs.PySiftData.from_data_frame(source_kps,", "and no symmetry check is required. The ratio check is", "df.columns = ['source_image', 'source_idx', 'destination_image', 'destination_idx', 'score', 'ambiguity'] df.source_idx =", "embedded on the cuda side and returned as an ambiguity", "pd.concat([pd.Series(source), pd.Series(matches.index), pd.Series(destination), matches.match, matches.score, matches.ambiguity], axis=1) df.columns = ['source_image',", "In testing symmetry is not required as it is expensive", "accuracy when using this implementation. 
\"\"\" source_kps = edge.source.get_keypoints(index=aidx) source_des", "source_des = edge.source.descriptors[aidx] source_map = {k:v for k, v in", "k, v in enumerate(source_kps.index)} destin_kps = edge.destination.get_keypoints(index=bidx) destin_des = edge.destination.descriptors[bidx]", "'destination_idx', 'score', 'ambiguity'] df.source_idx = df.source_idx.map(source_map) df.destination_idx = df.destination_idx.map(destin_map) #", "['source_image', 'source_idx', 'destination_image', 'destination_idx', 'score', 'ambiguity'] df.source_idx = df.source_idx.map(source_map) df.destination_idx", "enumerate(source_kps.index)} destin_kps = edge.destination.get_keypoints(index=bidx) destin_des = edge.destination.descriptors[bidx] destin_map = {k:v", "source = np.empty(len(matches)) source[:] = edge.source['node_id'] destination = np.empty(len(matches)) destination[:]", "This is the mismatch) source = np.empty(len(matches)) source[:] = edge.source['node_id']", "cs.PySiftData.from_data_frame(destin_kps, destin_des) cs.PyMatchSiftData(s_siftdata, d_siftdata) matches, _ = s_siftdata.to_data_frame() # Matches", "Apply a composite CUDA matcher and ratio check. If this", "is expensive without significant gain in accuracy when using this", "0-n, but need to be remapped to the source_kps, #", "cs except: cs = None import numpy as np import", "= pd.concat([pd.Series(source), pd.Series(matches.index), pd.Series(destination), matches.match, matches.score, matches.ambiguity], axis=1) df.columns =", "for k, v in enumerate(destin_kps.index)} s_siftdata = cs.PySiftData.from_data_frame(source_kps, source_des) d_siftdata", "# Set the matches and set the 'ratio' (ambiguity) mask", "ratio check is necessary and no symmetry check is required.", "composite CUDA matcher and ratio check. If this method is", "symmetry check is required. 
The ratio check is embedded on", "match(edge, aidx=None, bidx=None, **kwargs): \"\"\" Apply a composite CUDA matcher", "'score', 'ambiguity'] df.source_idx = df.source_idx.map(source_map) df.destination_idx = df.destination_idx.map(destin_map) # Set", "required. The ratio check is embedded on the cuda side", "edge.destination.get_keypoints(index=bidx) destin_des = edge.destination.descriptors[bidx] destin_map = {k:v for k, v", "no additional ratio check is necessary and no symmetry check", "returned as an ambiguity value. In testing symmetry is not", "gain in accuracy when using this implementation. \"\"\" source_kps =", "for k, v in enumerate(source_kps.index)} destin_kps = edge.destination.get_keypoints(index=bidx) destin_des =", "k, v in enumerate(destin_kps.index)} s_siftdata = cs.PySiftData.from_data_frame(source_kps, source_des) d_siftdata =", "source_des) d_siftdata = cs.PySiftData.from_data_frame(destin_kps, destin_des) cs.PyMatchSiftData(s_siftdata, d_siftdata) matches, _ =", "edge.destination['node_id'] df = pd.concat([pd.Series(source), pd.Series(matches.index), pd.Series(destination), matches.match, matches.score, matches.ambiguity], axis=1)", "expensive without significant gain in accuracy when using this implementation.", "implementation. \"\"\" source_kps = edge.source.get_keypoints(index=aidx) source_des = edge.source.descriptors[aidx] source_map =", "import numpy as np import pandas as pd def match(edge,", "method is used, no additional ratio check is necessary and", "required as it is expensive without significant gain in accuracy", "is embedded on the cuda side and returned as an", "pd def match(edge, aidx=None, bidx=None, **kwargs): \"\"\" Apply a composite", "= df.destination_idx.map(destin_map) # Set the matches and set the 'ratio'", "destination[:] = edge.destination['node_id'] df = pd.concat([pd.Series(source), pd.Series(matches.index), pd.Series(destination), matches.match, matches.score,", "in accuracy when using this implementation. 
\"\"\" source_kps = edge.source.get_keypoints(index=aidx)", "destin_kps indices. This is the mismatch) source = np.empty(len(matches)) source[:]", "df = pd.concat([pd.Series(source), pd.Series(matches.index), pd.Series(destination), matches.match, matches.score, matches.ambiguity], axis=1) df.columns", "= cs.PySiftData.from_data_frame(destin_kps, destin_des) cs.PyMatchSiftData(s_siftdata, d_siftdata) matches, _ = s_siftdata.to_data_frame() #", "matches and set the 'ratio' (ambiguity) mask edge.matches = df", "\"\"\" Apply a composite CUDA matcher and ratio check. If", "to be remapped to the source_kps, # destin_kps indices. This", "Matches are reindexed 0-n, but need to be remapped to", "using this implementation. \"\"\" source_kps = edge.source.get_keypoints(index=aidx) source_des = edge.source.descriptors[aidx]", "necessary and no symmetry check is required. The ratio check", "mismatch) source = np.empty(len(matches)) source[:] = edge.source['node_id'] destination = np.empty(len(matches))", "as np import pandas as pd def match(edge, aidx=None, bidx=None,", "aidx=None, bidx=None, **kwargs): \"\"\" Apply a composite CUDA matcher and", "edge.source['node_id'] destination = np.empty(len(matches)) destination[:] = edge.destination['node_id'] df = pd.concat([pd.Series(source),", "<reponame>gsn9/autocnet import warnings try: import cudasift as cs except: cs", "# Matches are reindexed 0-n, but need to be remapped", "'ambiguity'] df.source_idx = df.source_idx.map(source_map) df.destination_idx = df.destination_idx.map(destin_map) # Set the", "= df.source_idx.map(source_map) df.destination_idx = df.destination_idx.map(destin_map) # Set the matches and", "matches.ambiguity], axis=1) df.columns = ['source_image', 'source_idx', 'destination_image', 'destination_idx', 'score', 'ambiguity']", "edge.source.descriptors[aidx] source_map = {k:v for k, v in enumerate(source_kps.index)} destin_kps", "cs.PySiftData.from_data_frame(source_kps, source_des) d_siftdata = 
cs.PySiftData.from_data_frame(destin_kps, destin_des) cs.PyMatchSiftData(s_siftdata, d_siftdata) matches, _", "_ = s_siftdata.to_data_frame() # Matches are reindexed 0-n, but need", "= ['source_image', 'source_idx', 'destination_image', 'destination_idx', 'score', 'ambiguity'] df.source_idx = df.source_idx.map(source_map)", "as pd def match(edge, aidx=None, bidx=None, **kwargs): \"\"\" Apply a", "numpy as np import pandas as pd def match(edge, aidx=None,", "the matches and set the 'ratio' (ambiguity) mask edge.matches =", "pd.Series(destination), matches.match, matches.score, matches.ambiguity], axis=1) df.columns = ['source_image', 'source_idx', 'destination_image',", "v in enumerate(destin_kps.index)} s_siftdata = cs.PySiftData.from_data_frame(source_kps, source_des) d_siftdata = cs.PySiftData.from_data_frame(destin_kps,", "ratio check is embedded on the cuda side and returned", "\"\"\" source_kps = edge.source.get_keypoints(index=aidx) source_des = edge.source.descriptors[aidx] source_map = {k:v", "'destination_image', 'destination_idx', 'score', 'ambiguity'] df.source_idx = df.source_idx.map(source_map) df.destination_idx = df.destination_idx.map(destin_map)", "but need to be remapped to the source_kps, # destin_kps", "when using this implementation. \"\"\" source_kps = edge.source.get_keypoints(index=aidx) source_des =", "check is required. The ratio check is embedded on the", "= edge.source.descriptors[aidx] source_map = {k:v for k, v in enumerate(source_kps.index)}", "df.destination_idx.map(destin_map) # Set the matches and set the 'ratio' (ambiguity)", "matches, _ = s_siftdata.to_data_frame() # Matches are reindexed 0-n, but", "import pandas as pd def match(edge, aidx=None, bidx=None, **kwargs): \"\"\"", "on the cuda side and returned as an ambiguity value.", "cudasift as cs except: cs = None import numpy as", "cuda side and returned as an ambiguity value. 
In testing", "If this method is used, no additional ratio check is", "s_siftdata = cs.PySiftData.from_data_frame(source_kps, source_des) d_siftdata = cs.PySiftData.from_data_frame(destin_kps, destin_des) cs.PyMatchSiftData(s_siftdata, d_siftdata)", "check is embedded on the cuda side and returned as", "this implementation. \"\"\" source_kps = edge.source.get_keypoints(index=aidx) source_des = edge.source.descriptors[aidx] source_map", "np.empty(len(matches)) destination[:] = edge.destination['node_id'] df = pd.concat([pd.Series(source), pd.Series(matches.index), pd.Series(destination), matches.match,", "axis=1) df.columns = ['source_image', 'source_idx', 'destination_image', 'destination_idx', 'score', 'ambiguity'] df.source_idx", "source_map = {k:v for k, v in enumerate(source_kps.index)} destin_kps =", "enumerate(destin_kps.index)} s_siftdata = cs.PySiftData.from_data_frame(source_kps, source_des) d_siftdata = cs.PySiftData.from_data_frame(destin_kps, destin_des) cs.PyMatchSiftData(s_siftdata,", "edge.source.get_keypoints(index=aidx) source_des = edge.source.descriptors[aidx] source_map = {k:v for k, v", "source_kps, # destin_kps indices. This is the mismatch) source =", "significant gain in accuracy when using this implementation. \"\"\" source_kps", "used, no additional ratio check is necessary and no symmetry", "= edge.destination['node_id'] df = pd.concat([pd.Series(source), pd.Series(matches.index), pd.Series(destination), matches.match, matches.score, matches.ambiguity],", "edge.destination.descriptors[bidx] destin_map = {k:v for k, v in enumerate(destin_kps.index)} s_siftdata", "this method is used, no additional ratio check is necessary", "check is necessary and no symmetry check is required. The", "matcher and ratio check. 
If this method is used, no", "except: cs = None import numpy as np import pandas", "source_kps = edge.source.get_keypoints(index=aidx) source_des = edge.source.descriptors[aidx] source_map = {k:v for", "be remapped to the source_kps, # destin_kps indices. This is", "in enumerate(source_kps.index)} destin_kps = edge.destination.get_keypoints(index=bidx) destin_des = edge.destination.descriptors[bidx] destin_map =", "{k:v for k, v in enumerate(destin_kps.index)} s_siftdata = cs.PySiftData.from_data_frame(source_kps, source_des)", "ratio check. If this method is used, no additional ratio", "side and returned as an ambiguity value. In testing symmetry", "is the mismatch) source = np.empty(len(matches)) source[:] = edge.source['node_id'] destination", "d_siftdata) matches, _ = s_siftdata.to_data_frame() # Matches are reindexed 0-n,", "need to be remapped to the source_kps, # destin_kps indices.", "the cuda side and returned as an ambiguity value. In", "is necessary and no symmetry check is required. The ratio", "{k:v for k, v in enumerate(source_kps.index)} destin_kps = edge.destination.get_keypoints(index=bidx) destin_des", "def match(edge, aidx=None, bidx=None, **kwargs): \"\"\" Apply a composite CUDA", "df.destination_idx = df.destination_idx.map(destin_map) # Set the matches and set the", "destin_des) cs.PyMatchSiftData(s_siftdata, d_siftdata) matches, _ = s_siftdata.to_data_frame() # Matches are", "symmetry is not required as it is expensive without significant", "destination = np.empty(len(matches)) destination[:] = edge.destination['node_id'] df = pd.concat([pd.Series(source), pd.Series(matches.index),", "not required as it is expensive without significant gain in", "= None import numpy as np import pandas as pd", "and ratio check. If this method is used, no additional", "ambiguity value. 
In testing symmetry is not required as it", "matches.score, matches.ambiguity], axis=1) df.columns = ['source_image', 'source_idx', 'destination_image', 'destination_idx', 'score',", "the mismatch) source = np.empty(len(matches)) source[:] = edge.source['node_id'] destination =", "without significant gain in accuracy when using this implementation. \"\"\"", "value. In testing symmetry is not required as it is", "= edge.destination.descriptors[bidx] destin_map = {k:v for k, v in enumerate(destin_kps.index)}", "source[:] = edge.source['node_id'] destination = np.empty(len(matches)) destination[:] = edge.destination['node_id'] df", "df.source_idx.map(source_map) df.destination_idx = df.destination_idx.map(destin_map) # Set the matches and set", "additional ratio check is necessary and no symmetry check is", "The ratio check is embedded on the cuda side and", "s_siftdata.to_data_frame() # Matches are reindexed 0-n, but need to be", "# destin_kps indices. This is the mismatch) source = np.empty(len(matches))", "is used, no additional ratio check is necessary and no" ]
[ "from app.apis.hello import api as hello api = Api( title='api',", "import api as hello api = Api( title='api', version='1.0', description='',", "Api from app.apis.hello import api as hello api = Api(", "app.apis.hello import api as hello api = Api( title='api', version='1.0',", "as hello api = Api( title='api', version='1.0', description='', prefix='/api', doc='/api'", "api = Api( title='api', version='1.0', description='', prefix='/api', doc='/api' ) api.add_namespace(hello)", "hello api = Api( title='api', version='1.0', description='', prefix='/api', doc='/api' )", "flask_restx import Api from app.apis.hello import api as hello api", "from flask_restx import Api from app.apis.hello import api as hello", "import Api from app.apis.hello import api as hello api =", "api as hello api = Api( title='api', version='1.0', description='', prefix='/api'," ]
[ "test.support import captured_stdout from brainfuck import BrainFuck class TestCore(unittest.TestCase): def", "fizzbuzz_list.append(\"Buzz\") else: fizzbuzz_list.append(str(i)) fizzbuzz_list.append(\"\\n\") self.assertEqual(stdout.getvalue(), \" \".join(fizzbuzz_list)) def test_set_command(self): bf", "utf-8 import unittest from test.support import captured_stdout from brainfuck import", "range(1, 101): if i % 15 == 0: fizzbuzz_list.append(\"FizzBuzz\") elif", "\"Hello, world!\\n\") def test_fizzbuzz(self): bf = BrainFuck() bf.load_file(\"./tests/fizz_buzz.txt\") with captured_stdout()", "bf.run() fizzbuzz_list = list() for i in range(1, 101): if", "i % 5 == 0: fizzbuzz_list.append(\"Buzz\") else: fizzbuzz_list.append(str(i)) fizzbuzz_list.append(\"\\n\") self.assertEqual(stdout.getvalue(),", "test_fizzbuzz(self): bf = BrainFuck() bf.load_file(\"./tests/fizz_buzz.txt\") with captured_stdout() as stdout: bf.run()", "elif i % 5 == 0: fizzbuzz_list.append(\"Buzz\") else: fizzbuzz_list.append(str(i)) fizzbuzz_list.append(\"\\n\")", "BrainFuck() with captured_stdout() as stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\") def", "captured_stdout() as stdout: bf.run() fizzbuzz_list = list() for i in", "import BrainFuck class TestCore(unittest.TestCase): def test_hello_world(self): bf = BrainFuck() with", "% 15 == 0: fizzbuzz_list.append(\"FizzBuzz\") elif i % 3 ==", "class TestCore(unittest.TestCase): def test_hello_world(self): bf = BrainFuck() with captured_stdout() as", "0: fizzbuzz_list.append(\"Buzz\") else: fizzbuzz_list.append(str(i)) fizzbuzz_list.append(\"\\n\") self.assertEqual(stdout.getvalue(), \" \".join(fizzbuzz_list)) def test_set_command(self):", "bf = BrainFuck() bf.set_command(\"にゃにゃ\", \"にゃー\", \"にゃっ\", \"にゃん\", \"にゃ。\", \"にゃ、\", \"「\",", "0: fizzbuzz_list.append(\"FizzBuzz\") elif i % 3 == 0: fizzbuzz_list.append(\"Fizz\") elif", "== 0: fizzbuzz_list.append(\"FizzBuzz\") elif i % 3 == 0: fizzbuzz_list.append(\"Fizz\")", "i % 3 
== 0: fizzbuzz_list.append(\"Fizz\") elif i % 5", "\"にゃん\", \"にゃ。\", \"にゃ、\", \"「\", \"」\") bf.load_file(\"./tests/hello_world_nya.txt\") with captured_stdout() as stdout:", "% 3 == 0: fizzbuzz_list.append(\"Fizz\") elif i % 5 ==", "5 == 0: fizzbuzz_list.append(\"Buzz\") else: fizzbuzz_list.append(str(i)) fizzbuzz_list.append(\"\\n\") self.assertEqual(stdout.getvalue(), \" \".join(fizzbuzz_list))", "stdout: bf.run() fizzbuzz_list = list() for i in range(1, 101):", "world!\\n\") def test_fizzbuzz(self): bf = BrainFuck() bf.load_file(\"./tests/fizz_buzz.txt\") with captured_stdout() as", "self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\") def test_fizzbuzz(self): bf = BrainFuck() bf.load_file(\"./tests/fizz_buzz.txt\") with", "self.assertEqual(stdout.getvalue(), \" \".join(fizzbuzz_list)) def test_set_command(self): bf = BrainFuck() bf.set_command(\"にゃにゃ\", \"にゃー\",", "captured_stdout() as stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\") def test_fizzbuzz(self): bf", "3 == 0: fizzbuzz_list.append(\"Fizz\") elif i % 5 == 0:", "0: fizzbuzz_list.append(\"Fizz\") elif i % 5 == 0: fizzbuzz_list.append(\"Buzz\") else:", "bf.set_command(\"にゃにゃ\", \"にゃー\", \"にゃっ\", \"にゃん\", \"にゃ。\", \"にゃ、\", \"「\", \"」\") bf.load_file(\"./tests/hello_world_nya.txt\") with", "== 0: fizzbuzz_list.append(\"Buzz\") else: fizzbuzz_list.append(str(i)) fizzbuzz_list.append(\"\\n\") self.assertEqual(stdout.getvalue(), \" \".join(fizzbuzz_list)) def", "BrainFuck() bf.load_file(\"./tests/fizz_buzz.txt\") with captured_stdout() as stdout: bf.run() fizzbuzz_list = list()", "test_hello_world(self): bf = BrainFuck() with captured_stdout() as stdout: bf.run() self.assertEqual(stdout.getvalue(),", "def test_fizzbuzz(self): bf = BrainFuck() bf.load_file(\"./tests/fizz_buzz.txt\") with captured_stdout() as stdout:", "fizzbuzz_list.append(\"\\n\") self.assertEqual(stdout.getvalue(), \" \".join(fizzbuzz_list)) def test_set_command(self): bf = BrainFuck() 
bf.set_command(\"にゃにゃ\",", "\"にゃ、\", \"「\", \"」\") bf.load_file(\"./tests/hello_world_nya.txt\") with captured_stdout() as stdout: bf.run() self.assertEqual(stdout.getvalue(),", "\"「\", \"」\") bf.load_file(\"./tests/hello_world_nya.txt\") with captured_stdout() as stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello,", "captured_stdout from brainfuck import BrainFuck class TestCore(unittest.TestCase): def test_hello_world(self): bf", "for i in range(1, 101): if i % 15 ==", "bf.load_file(\"./tests/hello_world_nya.txt\") with captured_stdout() as stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\") if", "15 == 0: fizzbuzz_list.append(\"FizzBuzz\") elif i % 3 == 0:", "brainfuck import BrainFuck class TestCore(unittest.TestCase): def test_hello_world(self): bf = BrainFuck()", "else: fizzbuzz_list.append(str(i)) fizzbuzz_list.append(\"\\n\") self.assertEqual(stdout.getvalue(), \" \".join(fizzbuzz_list)) def test_set_command(self): bf =", "= BrainFuck() bf.set_command(\"にゃにゃ\", \"にゃー\", \"にゃっ\", \"にゃん\", \"にゃ。\", \"にゃ、\", \"「\", \"」\")", "unittest from test.support import captured_stdout from brainfuck import BrainFuck class", "from brainfuck import BrainFuck class TestCore(unittest.TestCase): def test_hello_world(self): bf =", "\".join(fizzbuzz_list)) def test_set_command(self): bf = BrainFuck() bf.set_command(\"にゃにゃ\", \"にゃー\", \"にゃっ\", \"にゃん\",", "= list() for i in range(1, 101): if i %", "stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\") def test_fizzbuzz(self): bf = BrainFuck()", "with captured_stdout() as stdout: bf.run() fizzbuzz_list = list() for i", "bf.run() self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\") def test_fizzbuzz(self): bf = BrainFuck() bf.load_file(\"./tests/fizz_buzz.txt\")", "i % 15 == 0: fizzbuzz_list.append(\"FizzBuzz\") elif i % 3", "if i % 15 == 0: fizzbuzz_list.append(\"FizzBuzz\") elif i %", "test_set_command(self): bf = BrainFuck() bf.set_command(\"にゃにゃ\", \"にゃー\", \"にゃっ\", 
\"にゃん\", \"にゃ。\", \"にゃ、\",", "= BrainFuck() with captured_stdout() as stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\")", "def test_set_command(self): bf = BrainFuck() bf.set_command(\"にゃにゃ\", \"にゃー\", \"にゃっ\", \"にゃん\", \"にゃ。\",", "BrainFuck class TestCore(unittest.TestCase): def test_hello_world(self): bf = BrainFuck() with captured_stdout()", "\"」\") bf.load_file(\"./tests/hello_world_nya.txt\") with captured_stdout() as stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\")", "fizzbuzz_list.append(str(i)) fizzbuzz_list.append(\"\\n\") self.assertEqual(stdout.getvalue(), \" \".join(fizzbuzz_list)) def test_set_command(self): bf = BrainFuck()", "fizzbuzz_list.append(\"Fizz\") elif i % 5 == 0: fizzbuzz_list.append(\"Buzz\") else: fizzbuzz_list.append(str(i))", "with captured_stdout() as stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\") def test_fizzbuzz(self):", "captured_stdout() as stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\") if __name__ ==", "as stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\") if __name__ == \"__main__\":", "fizzbuzz_list = list() for i in range(1, 101): if i", "as stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\") def test_fizzbuzz(self): bf =", "from test.support import captured_stdout from brainfuck import BrainFuck class TestCore(unittest.TestCase):", "\"にゃー\", \"にゃっ\", \"にゃん\", \"にゃ。\", \"にゃ、\", \"「\", \"」\") bf.load_file(\"./tests/hello_world_nya.txt\") with captured_stdout()", "= BrainFuck() bf.load_file(\"./tests/fizz_buzz.txt\") with captured_stdout() as stdout: bf.run() fizzbuzz_list =", "# coding: utf-8 import unittest from test.support import captured_stdout from", "in range(1, 101): if i % 15 == 0: fizzbuzz_list.append(\"FizzBuzz\")", "\" \".join(fizzbuzz_list)) def test_set_command(self): bf = BrainFuck() bf.set_command(\"にゃにゃ\", \"にゃー\", \"にゃっ\",", "\"にゃ。\", \"にゃ、\", \"「\", \"」\") 
bf.load_file(\"./tests/hello_world_nya.txt\") with captured_stdout() as stdout: bf.run()", "coding: utf-8 import unittest from test.support import captured_stdout from brainfuck", "bf.load_file(\"./tests/fizz_buzz.txt\") with captured_stdout() as stdout: bf.run() fizzbuzz_list = list() for", "elif i % 3 == 0: fizzbuzz_list.append(\"Fizz\") elif i %", "stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\") if __name__ == \"__main__\": unittest.main()", "as stdout: bf.run() fizzbuzz_list = list() for i in range(1,", "bf = BrainFuck() with captured_stdout() as stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello,", "BrainFuck() bf.set_command(\"にゃにゃ\", \"にゃー\", \"にゃっ\", \"にゃん\", \"にゃ。\", \"にゃ、\", \"「\", \"」\") bf.load_file(\"./tests/hello_world_nya.txt\")", "import unittest from test.support import captured_stdout from brainfuck import BrainFuck", "\"にゃっ\", \"にゃん\", \"にゃ。\", \"にゃ、\", \"「\", \"」\") bf.load_file(\"./tests/hello_world_nya.txt\") with captured_stdout() as", "101): if i % 15 == 0: fizzbuzz_list.append(\"FizzBuzz\") elif i", "with captured_stdout() as stdout: bf.run() self.assertEqual(stdout.getvalue(), \"Hello, world!\\n\") if __name__", "def test_hello_world(self): bf = BrainFuck() with captured_stdout() as stdout: bf.run()", "i in range(1, 101): if i % 15 == 0:", "list() for i in range(1, 101): if i % 15", "import captured_stdout from brainfuck import BrainFuck class TestCore(unittest.TestCase): def test_hello_world(self):", "% 5 == 0: fizzbuzz_list.append(\"Buzz\") else: fizzbuzz_list.append(str(i)) fizzbuzz_list.append(\"\\n\") self.assertEqual(stdout.getvalue(), \"", "TestCore(unittest.TestCase): def test_hello_world(self): bf = BrainFuck() with captured_stdout() as stdout:", "bf = BrainFuck() bf.load_file(\"./tests/fizz_buzz.txt\") with captured_stdout() as stdout: bf.run() fizzbuzz_list", "<gh_stars>0 # coding: utf-8 import unittest from test.support import captured_stdout", "== 0: fizzbuzz_list.append(\"Fizz\") elif i % 
5 == 0: fizzbuzz_list.append(\"Buzz\")", "fizzbuzz_list.append(\"FizzBuzz\") elif i % 3 == 0: fizzbuzz_list.append(\"Fizz\") elif i" ]
[ "FastAPI() templates = Jinja2Templates(directory=\"templates\") app.mount(\"/static\", StaticFiles(directory=\"static\"), name=\"static\") @app.get(\"/\", response_class=HTMLResponse) async", "with cookies return process_initial(request) @app.get(\"/page\", response_class=HTMLResponse) async def home(request: Request):", "Response from fastapi.responses import HTMLResponse from fastapi.staticfiles import StaticFiles from", "FastAPI, Request, Response from fastapi.responses import HTMLResponse from fastapi.staticfiles import", "templates = Jinja2Templates(directory=\"templates\") app.mount(\"/static\", StaticFiles(directory=\"static\"), name=\"static\") @app.get(\"/\", response_class=HTMLResponse) async def", "fastapi.responses import HTMLResponse from fastapi.staticfiles import StaticFiles from fastapi.templating import", "# Expect requests with cookies return process_initial(request) @app.get(\"/page\", response_class=HTMLResponse) async", "home(request: Request): # Expect requests with cookies return get_page_data(request) if", "fastapi import FastAPI, Request, Response from fastapi.responses import HTMLResponse from", "import uvicorn app = FastAPI() templates = Jinja2Templates(directory=\"templates\") app.mount(\"/static\", StaticFiles(directory=\"static\"),", "home(request: Request): # Expect requests with cookies return process_initial(request) @app.get(\"/page\",", "fastapi.staticfiles import StaticFiles from fastapi.templating import Jinja2Templates from utils import", "from utils import get_page_data, process_initial import uvicorn app = FastAPI()", "cookies return process_initial(request) @app.get(\"/page\", response_class=HTMLResponse) async def home(request: Request): #", "process_initial import uvicorn app = FastAPI() templates = Jinja2Templates(directory=\"templates\") app.mount(\"/static\",", "Expect requests with cookies return process_initial(request) @app.get(\"/page\", response_class=HTMLResponse) async def", "def home(request: Request): # 
Expect requests with cookies return get_page_data(request)", "from fastapi import FastAPI, Request, Response from fastapi.responses import HTMLResponse", "@app.get(\"/\", response_class=HTMLResponse) async def home(request: Request): # Expect requests with", "fastapi.templating import Jinja2Templates from utils import get_page_data, process_initial import uvicorn", "Jinja2Templates from utils import get_page_data, process_initial import uvicorn app =", "import Jinja2Templates from utils import get_page_data, process_initial import uvicorn app", "from fastapi.responses import HTMLResponse from fastapi.staticfiles import StaticFiles from fastapi.templating", "import StaticFiles from fastapi.templating import Jinja2Templates from utils import get_page_data,", "app = FastAPI() templates = Jinja2Templates(directory=\"templates\") app.mount(\"/static\", StaticFiles(directory=\"static\"), name=\"static\") @app.get(\"/\",", "response_class=HTMLResponse) async def home(request: Request): # Expect requests with cookies", "= FastAPI() templates = Jinja2Templates(directory=\"templates\") app.mount(\"/static\", StaticFiles(directory=\"static\"), name=\"static\") @app.get(\"/\", response_class=HTMLResponse)", "return get_page_data(request) if __name__ == \"__main__\": uvicorn.run(\"main:app\", host=\"127.0.0.1\", port=8050, log_level=\"info\")", "get_page_data, process_initial import uvicorn app = FastAPI() templates = Jinja2Templates(directory=\"templates\")", "Request, Response from fastapi.responses import HTMLResponse from fastapi.staticfiles import StaticFiles", "import HTMLResponse from fastapi.staticfiles import StaticFiles from fastapi.templating import Jinja2Templates", "utils import get_page_data, process_initial import uvicorn app = FastAPI() templates", "# Expect requests with cookies return get_page_data(request) if __name__ ==", "def home(request: Request): # Expect requests with cookies return process_initial(request)", "from fastapi.templating import 
Jinja2Templates from utils import get_page_data, process_initial import", "async def home(request: Request): # Expect requests with cookies return", "Request): # Expect requests with cookies return process_initial(request) @app.get(\"/page\", response_class=HTMLResponse)", "Request): # Expect requests with cookies return get_page_data(request) if __name__", "Expect requests with cookies return get_page_data(request) if __name__ == \"__main__\":", "app.mount(\"/static\", StaticFiles(directory=\"static\"), name=\"static\") @app.get(\"/\", response_class=HTMLResponse) async def home(request: Request): #", "with cookies return get_page_data(request) if __name__ == \"__main__\": uvicorn.run(\"main:app\", host=\"127.0.0.1\",", "process_initial(request) @app.get(\"/page\", response_class=HTMLResponse) async def home(request: Request): # Expect requests", "import FastAPI, Request, Response from fastapi.responses import HTMLResponse from fastapi.staticfiles", "cookies return get_page_data(request) if __name__ == \"__main__\": uvicorn.run(\"main:app\", host=\"127.0.0.1\", port=8050,", "requests with cookies return get_page_data(request) if __name__ == \"__main__\": uvicorn.run(\"main:app\",", "return process_initial(request) @app.get(\"/page\", response_class=HTMLResponse) async def home(request: Request): # Expect", "@app.get(\"/page\", response_class=HTMLResponse) async def home(request: Request): # Expect requests with", "Jinja2Templates(directory=\"templates\") app.mount(\"/static\", StaticFiles(directory=\"static\"), name=\"static\") @app.get(\"/\", response_class=HTMLResponse) async def home(request: Request):", "StaticFiles from fastapi.templating import Jinja2Templates from utils import get_page_data, process_initial", "from fastapi.staticfiles import StaticFiles from fastapi.templating import Jinja2Templates from utils", "requests with cookies return process_initial(request) @app.get(\"/page\", response_class=HTMLResponse) async def home(request:", 
"StaticFiles(directory=\"static\"), name=\"static\") @app.get(\"/\", response_class=HTMLResponse) async def home(request: Request): # Expect", "= Jinja2Templates(directory=\"templates\") app.mount(\"/static\", StaticFiles(directory=\"static\"), name=\"static\") @app.get(\"/\", response_class=HTMLResponse) async def home(request:", "HTMLResponse from fastapi.staticfiles import StaticFiles from fastapi.templating import Jinja2Templates from", "name=\"static\") @app.get(\"/\", response_class=HTMLResponse) async def home(request: Request): # Expect requests", "import get_page_data, process_initial import uvicorn app = FastAPI() templates =", "uvicorn app = FastAPI() templates = Jinja2Templates(directory=\"templates\") app.mount(\"/static\", StaticFiles(directory=\"static\"), name=\"static\")" ]
[ "\"Description\": \"\", \"MembershipTypeId\": 12, \"ParentId\": int(group_id)} } create_url = 'https://%s/api/GroupService/Actions/GroupService.CreateGroup'", "OME API to create a new static group # #", "= requests.get(group_url, headers=headers, verify=False) if response.status_code == 200: json_data =", "\"Random Test Group\" \"\"\" import json import argparse from argparse", "2.0 (the \"License\"); # you may not use this file", "verify=False) if response.status_code == 200: json_data = response.json() if json_data['@odata.count']", "group_name, \"Description\": \"\", \"MembershipTypeId\": 12, \"ParentId\": int(group_id)} } create_url =", "data=json.dumps(user_details), headers=headers) if session_info.status_code == 201: headers['X-Auth-Token'] = session_info.headers['X-Auth-Token'] response", "used over Basic Authentication Note that the credentials entered are", "if create_resp.status_code == 200: print(\"New group created : ID =\",", "error info below\") print(json.dumps(create_resp.json(), indent=4, sort_keys=False)) else: print(\"Unable to retrieve", "with OME and enumerate groups \"\"\" try: session_url = 'https://%s/api/SessionService/Sessions'", "def create_static_group(ip_address, user_name, password, group_name): \"\"\" Authenticate with OME and", "# Technically there should be only one result in the", "name for the group\") args = parser.parse_args() create_static_group(args.ip, args.user, args.password,", "import RawTextHelpFormatter import urllib3 import requests def create_static_group(ip_address, user_name, password,", "if response.status_code == 200: json_data = response.json() if json_data['@odata.count'] >", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "new static group DESCRIPTION: This script exercises the OME REST", "in the filter group_id = json_data['value'][0]['Id'] group_payload = {\"GroupModel\": {", "= 'https://%s/api/SessionService/Sessions' % ip_address group_url = 
\"https://%s/api/GroupService/Groups?$filter=Name eq 'Static Groups'\"", "%s\" % ip_address) except Exception as error: print(\"Unexpected error:\", str(error))", "argparse.ArgumentParser(description=__doc__, formatter_class=RawTextHelpFormatter) parser.add_argument(\"--ip\", \"-i\", required=True, help=\"OME Appliance IP\") parser.add_argument(\"--user\", \"-u\",", "new static group. The user is responsible for adding devices", "for adding devices to the group once the group has", "== 400: print(\"Failed group creation ...See error info below\") print(json.dumps(create_resp.json(),", "valid name for the group\") args = parser.parse_args() create_static_group(args.ip, args.user,", ": ID =\", create_resp.text) elif create_resp.status_code == 400: print(\"Failed group", "ip_address create_resp = requests.post(create_url, headers=headers, verify=False, data=json.dumps(group_payload)) if create_resp.status_code ==", "For authentication X-Auth is used over Basic Authentication Note that", "use this file except in compliance with the License. #", "as error: print(\"Unexpected error:\", str(error)) if __name__ == '__main__': urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)", "\"-g\", required=True, help=\"A valid name for the group\") args =", "are not stored to disk. EXAMPLE: python create_static_group.py --ip <xx>", "{'UserName': user_name, 'Password': password, 'SessionType': 'API'} session_info = requests.post(session_url, verify=False,", "response.status_code == 200: json_data = response.json() if json_data['@odata.count'] > 0:", "\"-p\", required=True, help=\"Password for OME Appliance\") parser.add_argument(\"--groupname\", \"-g\", required=True, help=\"A", "to retrieve group list from %s\" % ip_address) else: print(\"Unable", "under the License. # \"\"\" SYNOPSIS: Script to create a", "# # _author_ = <NAME> <<EMAIL>> # _version_ = 0.1", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "License. 
# You may obtain a copy of the License", "OME and enumerate groups \"\"\" try: session_url = 'https://%s/api/SessionService/Sessions' %", "import requests def create_static_group(ip_address, user_name, password, group_name): \"\"\" Authenticate with", "session_info.status_code == 201: headers['X-Auth-Token'] = session_info.headers['X-Auth-Token'] response = requests.get(group_url, headers=headers,", "under the License is distributed on an \"AS IS\" BASIS,", "License for the specific language governing permissions and # limitations", "X-Auth is used over Basic Authentication Note that the credentials", "200: print(\"New group created : ID =\", create_resp.text) elif create_resp.status_code", "# \"\"\" SYNOPSIS: Script to create a new static group", "--ip <xx> --user <username> --password <<PASSWORD>> --groupname \"Random Test Group\"", "user_name, 'Password': password, 'SessionType': 'API'} session_info = requests.post(session_url, verify=False, data=json.dumps(user_details),", "create_static_group(ip_address, user_name, password, group_name): \"\"\" Authenticate with OME and enumerate", "create a session with appliance %s\" % ip_address) except Exception", "session_info.headers['X-Auth-Token'] response = requests.get(group_url, headers=headers, verify=False) if response.status_code == 200:", "group # # _author_ = <NAME> <<EMAIL>> # _version_ =", "else: print(\"Unable to retrieve group list from %s\" % ip_address)", "'__main__': urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) parser = argparse.ArgumentParser(description=__doc__, formatter_class=RawTextHelpFormatter) parser.add_argument(\"--ip\", \"-i\", required=True, help=\"OME", "static group. The user is responsible for adding devices to", "in compliance with the License. # You may obtain a", "} create_url = 'https://%s/api/GroupService/Actions/GroupService.CreateGroup' % ip_address create_resp = requests.post(create_url, headers=headers,", "the group has been successfully created. 
For authentication X-Auth is", "software # distributed under the License is distributed on an", "EXAMPLE: python create_static_group.py --ip <xx> --user <username> --password <<PASSWORD>> --groupname", "a new static group # # _author_ = <NAME> <<EMAIL>>", "is responsible for adding devices to the group once the", "be only one result in the filter group_id = json_data['value'][0]['Id']", "password, 'SessionType': 'API'} session_info = requests.post(session_url, verify=False, data=json.dumps(user_details), headers=headers) if", "# _author_ = <NAME> <<EMAIL>> # _version_ = 0.1 #", "to the group once the group has been successfully created.", "to create a new static group DESCRIPTION: This script exercises", "been successfully created. For authentication X-Auth is used over Basic", "\"\"\" try: session_url = 'https://%s/api/SessionService/Sessions' % ip_address group_url = \"https://%s/api/GroupService/Groups?$filter=Name", "ID =\", create_resp.text) elif create_resp.status_code == 400: print(\"Failed group creation", "Exception as error: print(\"Unexpected error:\", str(error)) if __name__ == '__main__':", "help=\"Username for OME Appliance\", default=\"admin\") parser.add_argument(\"--password\", \"-p\", required=True, help=\"Password for", "== 200: json_data = response.json() if json_data['@odata.count'] > 0: #", "once the group has been successfully created. 
For authentication X-Auth", "else: print(\"Unable to create a session with appliance %s\" %", "= {\"GroupModel\": { \"Name\": group_name, \"Description\": \"\", \"MembershipTypeId\": 12, \"ParentId\":", "OME Appliance\", default=\"admin\") parser.add_argument(\"--password\", \"-p\", required=True, help=\"Password for OME Appliance\")", "user_details = {'UserName': user_name, 'Password': password, 'SessionType': 'API'} session_info =", "headers=headers, verify=False, data=json.dumps(group_payload)) if create_resp.status_code == 200: print(\"New group created", "authentication X-Auth is used over Basic Authentication Note that the", "% ip_address headers = {'content-type': 'application/json'} user_details = {'UserName': user_name,", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "__name__ == '__main__': urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) parser = argparse.ArgumentParser(description=__doc__, formatter_class=RawTextHelpFormatter) parser.add_argument(\"--ip\", \"-i\",", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "\"\"\" import json import argparse from argparse import RawTextHelpFormatter import", "to in writing, software # distributed under the License is", "= requests.post(session_url, verify=False, data=json.dumps(user_details), headers=headers) if session_info.status_code == 201: headers['X-Auth-Token']", "# See the License for the specific language governing permissions", "\"Name\": group_name, \"Description\": \"\", \"MembershipTypeId\": 12, \"ParentId\": int(group_id)} } create_url", "Appliance\", default=\"admin\") parser.add_argument(\"--password\", \"-p\", required=True, help=\"Password for OME Appliance\") parser.add_argument(\"--groupname\",", "language governing permissions and # limitations under the License. #", "= {'UserName': user_name, 'Password': password, 'SessionType': 'API'} session_info = requests.post(session_url,", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "ip_address headers = {'content-type': 'application/json'} user_details = {'UserName': user_name, 'Password':", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "result in the filter group_id = json_data['value'][0]['Id'] group_payload = {\"GroupModel\":", "Test Group\" \"\"\" import json import argparse from argparse import", "help=\"OME Appliance IP\") parser.add_argument(\"--user\", \"-u\", required=False, help=\"Username for OME Appliance\",", "with the License. 
# You may obtain a copy of", "str(error)) if __name__ == '__main__': urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) parser = argparse.ArgumentParser(description=__doc__, formatter_class=RawTextHelpFormatter)", "create_resp = requests.post(create_url, headers=headers, verify=False, data=json.dumps(group_payload)) if create_resp.status_code == 200:", "\"-u\", required=False, help=\"Username for OME Appliance\", default=\"admin\") parser.add_argument(\"--password\", \"-p\", required=True,", "print(\"Failed group creation ...See error info below\") print(json.dumps(create_resp.json(), indent=4, sort_keys=False))", "one result in the filter group_id = json_data['value'][0]['Id'] group_payload =", "limitations under the License. # \"\"\" SYNOPSIS: Script to create", "response.json() if json_data['@odata.count'] > 0: # Technically there should be", "compliance with the License. # You may obtain a copy", "agreed to in writing, software # distributed under the License", "{'content-type': 'application/json'} user_details = {'UserName': user_name, 'Password': password, 'SessionType': 'API'}", "for the group\") args = parser.parse_args() create_static_group(args.ip, args.user, args.password, args.groupname)", "_author_ = <NAME> <<EMAIL>> # _version_ = 0.1 # #", "distributed under the License is distributed on an \"AS IS\"", "group DESCRIPTION: This script exercises the OME REST API to", "the group once the group has been successfully created. For", "the filter group_id = json_data['value'][0]['Id'] group_payload = {\"GroupModel\": { \"Name\":", "and # limitations under the License. # \"\"\" SYNOPSIS: Script", "express or implied. # See the License for the specific", "except in compliance with the License. 
# You may obtain", "\"-i\", required=True, help=\"OME Appliance IP\") parser.add_argument(\"--user\", \"-u\", required=False, help=\"Username for", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "sort_keys=False)) else: print(\"Unable to retrieve group list from %s\" %", "= argparse.ArgumentParser(description=__doc__, formatter_class=RawTextHelpFormatter) parser.add_argument(\"--ip\", \"-i\", required=True, help=\"OME Appliance IP\") parser.add_argument(\"--user\",", "writing, software # distributed under the License is distributed on", "the License. # \"\"\" SYNOPSIS: Script to create a new", "for OME Appliance\", default=\"admin\") parser.add_argument(\"--password\", \"-p\", required=True, help=\"Password for OME", "elif create_resp.status_code == 400: print(\"Failed group creation ...See error info", "you may not use this file except in compliance with", "{\"GroupModel\": { \"Name\": group_name, \"Description\": \"\", \"MembershipTypeId\": 12, \"ParentId\": int(group_id)}", "% ip_address) else: print(\"Unable to create a session with appliance", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "create_static_group.py --ip <xx> --user <username> --password <<PASSWORD>> --groupname \"Random Test", "'API'} session_info = requests.post(session_url, verify=False, data=json.dumps(user_details), headers=headers) if session_info.status_code ==", "python create_static_group.py --ip <xx> --user <username> --password <<PASSWORD>> --groupname \"Random", "= 0.1 # # Copyright (c) 2020 Dell EMC Corporation", "a new static group DESCRIPTION: This script exercises the OME", "\"https://%s/api/GroupService/Groups?$filter=Name eq 'Static Groups'\" % ip_address headers = {'content-type': 'application/json'}", "data=json.dumps(group_payload)) if create_resp.status_code == 200: print(\"New group created : ID", "session_info = requests.post(session_url, verify=False, 
data=json.dumps(user_details), headers=headers) if session_info.status_code == 201:", "CONDITIONS OF ANY KIND, either express or implied. # See", "group_url = \"https://%s/api/GroupService/Groups?$filter=Name eq 'Static Groups'\" % ip_address headers =", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "ip_address group_url = \"https://%s/api/GroupService/Groups?$filter=Name eq 'Static Groups'\" % ip_address headers", "\"\"\" Authenticate with OME and enumerate groups \"\"\" try: session_url", "user is responsible for adding devices to the group once", "verify=False, data=json.dumps(user_details), headers=headers) if session_info.status_code == 201: headers['X-Auth-Token'] = session_info.headers['X-Auth-Token']", "devices to the group once the group has been successfully", "(c) 2020 Dell EMC Corporation # # Licensed under the", "using OME API to create a new static group #", "stored to disk. EXAMPLE: python create_static_group.py --ip <xx> --user <username>", "if __name__ == '__main__': urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) parser = argparse.ArgumentParser(description=__doc__, formatter_class=RawTextHelpFormatter) parser.add_argument(\"--ip\",", "== 201: headers['X-Auth-Token'] = session_info.headers['X-Auth-Token'] response = requests.get(group_url, headers=headers, verify=False)", "group has been successfully created. For authentication X-Auth is used", "new static group # # _author_ = <NAME> <<EMAIL>> #", "help=\"A valid name for the group\") args = parser.parse_args() create_static_group(args.ip,", "<username> --password <<PASSWORD>> --groupname \"Random Test Group\" \"\"\" import json", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "'application/json'} user_details = {'UserName': user_name, 'Password': password, 'SessionType': 'API'} session_info", "the License is distributed on an \"AS IS\" BASIS, #", "headers=headers) if session_info.status_code == 201: headers['X-Auth-Token'] = session_info.headers['X-Auth-Token'] response =", "= <NAME> <<EMAIL>> # _version_ = 0.1 # # Copyright", "parser.add_argument(\"--ip\", \"-i\", required=True, help=\"OME Appliance IP\") parser.add_argument(\"--user\", \"-u\", required=False, help=\"Username", "= json_data['value'][0]['Id'] group_payload = {\"GroupModel\": { \"Name\": group_name, \"Description\": \"\",", "groups \"\"\" try: session_url = 'https://%s/api/SessionService/Sessions' % ip_address group_url =", "governing permissions and # limitations under the License. # \"\"\"", "has been successfully created. For authentication X-Auth is used over", "to create a new static group # # _author_ =", "default=\"admin\") parser.add_argument(\"--password\", \"-p\", required=True, help=\"Password for OME Appliance\") parser.add_argument(\"--groupname\", \"-g\",", "The user is responsible for adding devices to the group", "Basic Authentication Note that the credentials entered are not stored", "<<EMAIL>> # _version_ = 0.1 # # Copyright (c) 2020", "try: session_url = 'https://%s/api/SessionService/Sessions' % ip_address group_url = \"https://%s/api/GroupService/Groups?$filter=Name eq", "Group\" \"\"\" import json import argparse from argparse import RawTextHelpFormatter", "200: json_data = response.json() if json_data['@odata.count'] > 0: # Technically", "law or agreed to in writing, software # distributed under", "json import argparse from argparse import RawTextHelpFormatter import urllib3 import", "error: print(\"Unexpected error:\", str(error)) if __name__ == '__main__': urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) parser", "# Copyright (c) 2020 Dell EMC Corporation # # Licensed", "Groups'\" % ip_address headers = {'content-type': 
'application/json'} user_details = {'UserName':", "print(\"Unable to create a session with appliance %s\" % ip_address)", "user_name, password, group_name): \"\"\" Authenticate with OME and enumerate groups", "enumerate groups \"\"\" try: session_url = 'https://%s/api/SessionService/Sessions' % ip_address group_url", "group once the group has been successfully created. For authentication", "create a new static group DESCRIPTION: This script exercises the", "may obtain a copy of the License at # #", "Appliance\") parser.add_argument(\"--groupname\", \"-g\", required=True, help=\"A valid name for the group\")", "headers=headers, verify=False) if response.status_code == 200: json_data = response.json() if", "required=False, help=\"Username for OME Appliance\", default=\"admin\") parser.add_argument(\"--password\", \"-p\", required=True, help=\"Password", "<xx> --user <username> --password <<PASSWORD>> --groupname \"Random Test Group\" \"\"\"", "to create a new static group. The user is responsible", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "may not use this file except in compliance with the", "creation ...See error info below\") print(json.dumps(create_resp.json(), indent=4, sort_keys=False)) else: print(\"Unable", "group list from %s\" % ip_address) else: print(\"Unable to create", "print(json.dumps(create_resp.json(), indent=4, sort_keys=False)) else: print(\"Unable to retrieve group list from", "headers['X-Auth-Token'] = session_info.headers['X-Auth-Token'] response = requests.get(group_url, headers=headers, verify=False) if response.status_code", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "this file except in compliance with the License. 
# You", "Note that the credentials entered are not stored to disk.", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "static group DESCRIPTION: This script exercises the OME REST API", "requests.post(session_url, verify=False, data=json.dumps(user_details), headers=headers) if session_info.status_code == 201: headers['X-Auth-Token'] =", "requests.get(group_url, headers=headers, verify=False) if response.status_code == 200: json_data = response.json()", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "\"\"\" SYNOPSIS: Script to create a new static group DESCRIPTION:", "json_data['value'][0]['Id'] group_payload = {\"GroupModel\": { \"Name\": group_name, \"Description\": \"\", \"MembershipTypeId\":", "12, \"ParentId\": int(group_id)} } create_url = 'https://%s/api/GroupService/Actions/GroupService.CreateGroup' % ip_address create_resp", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "REST API to create a new static group. 
The user", "there should be only one result in the filter group_id", "adding devices to the group once the group has been", "= requests.post(create_url, headers=headers, verify=False, data=json.dumps(group_payload)) if create_resp.status_code == 200: print(\"New", "Authenticate with OME and enumerate groups \"\"\" try: session_url =", "if json_data['@odata.count'] > 0: # Technically there should be only", "\"MembershipTypeId\": 12, \"ParentId\": int(group_id)} } create_url = 'https://%s/api/GroupService/Actions/GroupService.CreateGroup' % ip_address", "from %s\" % ip_address) else: print(\"Unable to create a session", "required=True, help=\"OME Appliance IP\") parser.add_argument(\"--user\", \"-u\", required=False, help=\"Username for OME", "exercises the OME REST API to create a new static", "appliance %s\" % ip_address) except Exception as error: print(\"Unexpected error:\",", "# _version_ = 0.1 # # Copyright (c) 2020 Dell", "a session with appliance %s\" % ip_address) except Exception as", "%s\" % ip_address) else: print(\"Unable to create a session with", "parser = argparse.ArgumentParser(description=__doc__, formatter_class=RawTextHelpFormatter) parser.add_argument(\"--ip\", \"-i\", required=True, help=\"OME Appliance IP\")", "{ \"Name\": group_name, \"Description\": \"\", \"MembershipTypeId\": 12, \"ParentId\": int(group_id)} }", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "'https://%s/api/GroupService/Actions/GroupService.CreateGroup' % ip_address create_resp = requests.post(create_url, headers=headers, verify=False, data=json.dumps(group_payload)) if", "formatter_class=RawTextHelpFormatter) parser.add_argument(\"--ip\", \"-i\", required=True, help=\"OME Appliance IP\") parser.add_argument(\"--user\", \"-u\", required=False,", "error:\", str(error)) if __name__ == '__main__': urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) parser = argparse.ArgumentParser(description=__doc__,", "= 
'https://%s/api/GroupService/Actions/GroupService.CreateGroup' % ip_address create_resp = requests.post(create_url, headers=headers, verify=False, data=json.dumps(group_payload))", "is used over Basic Authentication Note that the credentials entered", "requests def create_static_group(ip_address, user_name, password, group_name): \"\"\" Authenticate with OME", "or implied. # See the License for the specific language", "ip_address) else: print(\"Unable to create a session with appliance %s\"", "verify=False, data=json.dumps(group_payload)) if create_resp.status_code == 200: print(\"New group created :", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "and enumerate groups \"\"\" try: session_url = 'https://%s/api/SessionService/Sessions' % ip_address", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "== '__main__': urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) parser = argparse.ArgumentParser(description=__doc__, formatter_class=RawTextHelpFormatter) parser.add_argument(\"--ip\", \"-i\", required=True,", "201: headers['X-Auth-Token'] = session_info.headers['X-Auth-Token'] response = requests.get(group_url, headers=headers, verify=False) if", "not stored to disk. EXAMPLE: python create_static_group.py --ip <xx> --user", "Copyright (c) 2020 Dell EMC Corporation # # Licensed under", "argparse from argparse import RawTextHelpFormatter import urllib3 import requests def", "argparse import RawTextHelpFormatter import urllib3 import requests def create_static_group(ip_address, user_name,", "parser.add_argument(\"--groupname\", \"-g\", required=True, help=\"A valid name for the group\") args", "(the \"License\"); # you may not use this file except", "list from %s\" % ip_address) else: print(\"Unable to create a", "permissions and # limitations under the License. 
# \"\"\" SYNOPSIS:", "# you may not use this file except in compliance", "'https://%s/api/SessionService/Sessions' % ip_address group_url = \"https://%s/api/GroupService/Groups?$filter=Name eq 'Static Groups'\" %", "SYNOPSIS: Script to create a new static group DESCRIPTION: This", "to create a session with appliance %s\" % ip_address) except", "RawTextHelpFormatter import urllib3 import requests def create_static_group(ip_address, user_name, password, group_name):", "response = requests.get(group_url, headers=headers, verify=False) if response.status_code == 200: json_data", "<<PASSWORD>> --groupname \"Random Test Group\" \"\"\" import json import argparse", "OME REST API to create a new static group. The", "0: # Technically there should be only one result in", "# # Unless required by applicable law or agreed to", "should be only one result in the filter group_id =", "below\") print(json.dumps(create_resp.json(), indent=4, sort_keys=False)) else: print(\"Unable to retrieve group list", "import json import argparse from argparse import RawTextHelpFormatter import urllib3", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "'SessionType': 'API'} session_info = requests.post(session_url, verify=False, data=json.dumps(user_details), headers=headers) if session_info.status_code", "Version 2.0 (the \"License\"); # you may not use this", "print(\"Unable to retrieve group list from %s\" % ip_address) else:", "static group # # _author_ = <NAME> <<EMAIL>> # _version_", "OME Appliance\") parser.add_argument(\"--groupname\", \"-g\", required=True, help=\"A valid name for the", "from argparse import RawTextHelpFormatter import urllib3 import requests def create_static_group(ip_address,", "json_data = response.json() if json_data['@odata.count'] > 0: # Technically there", "Script to create a new static group DESCRIPTION: This script", "eq 'Static Groups'\" % ip_address headers = {'content-type': 'application/json'} user_details", "implied. 
# See the License for the specific language governing", "under the Apache License, Version 2.0 (the \"License\"); # you", "# # Python script using OME API to create a", "<NAME> <<EMAIL>> # _version_ = 0.1 # # Copyright (c)", "disk. EXAMPLE: python create_static_group.py --ip <xx> --user <username> --password <<PASSWORD>>", "int(group_id)} } create_url = 'https://%s/api/GroupService/Actions/GroupService.CreateGroup' % ip_address create_resp = requests.post(create_url,", "successfully created. For authentication X-Auth is used over Basic Authentication", "if session_info.status_code == 201: headers['X-Auth-Token'] = session_info.headers['X-Auth-Token'] response = requests.get(group_url,", "by applicable law or agreed to in writing, software #", "Dell EMC Corporation # # Licensed under the Apache License,", "= session_info.headers['X-Auth-Token'] response = requests.get(group_url, headers=headers, verify=False) if response.status_code ==", "group created : ID =\", create_resp.text) elif create_resp.status_code == 400:", "License. # \"\"\" SYNOPSIS: Script to create a new static", "Technically there should be only one result in the filter", "# limitations under the License. # \"\"\" SYNOPSIS: Script to", "only one result in the filter group_id = json_data['value'][0]['Id'] group_payload", "a new static group. The user is responsible for adding", "indent=4, sort_keys=False)) else: print(\"Unable to retrieve group list from %s\"", "create_resp.status_code == 200: print(\"New group created : ID =\", create_resp.text)", "created. 
For authentication X-Auth is used over Basic Authentication Note", "for OME Appliance\") parser.add_argument(\"--groupname\", \"-g\", required=True, help=\"A valid name for", "create_resp.status_code == 400: print(\"Failed group creation ...See error info below\")", "% ip_address) except Exception as error: print(\"Unexpected error:\", str(error)) if", "print(\"Unexpected error:\", str(error)) if __name__ == '__main__': urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) parser =", "<gh_stars>0 # # Python script using OME API to create", "session with appliance %s\" % ip_address) except Exception as error:", "Python script using OME API to create a new static", "group_name): \"\"\" Authenticate with OME and enumerate groups \"\"\" try:", "with appliance %s\" % ip_address) except Exception as error: print(\"Unexpected", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "_version_ = 0.1 # # Copyright (c) 2020 Dell EMC", "Unless required by applicable law or agreed to in writing,", "0.1 # # Copyright (c) 2020 Dell EMC Corporation #", "the credentials entered are not stored to disk. 
EXAMPLE: python", "'Password': password, 'SessionType': 'API'} session_info = requests.post(session_url, verify=False, data=json.dumps(user_details), headers=headers)", "= \"https://%s/api/GroupService/Groups?$filter=Name eq 'Static Groups'\" % ip_address headers = {'content-type':", "400: print(\"Failed group creation ...See error info below\") print(json.dumps(create_resp.json(), indent=4,", "the specific language governing permissions and # limitations under the", "applicable law or agreed to in writing, software # distributed", "info below\") print(json.dumps(create_resp.json(), indent=4, sort_keys=False)) else: print(\"Unable to retrieve group", "json_data['@odata.count'] > 0: # Technically there should be only one", "create_resp.text) elif create_resp.status_code == 400: print(\"Failed group creation ...See error", "urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) parser = argparse.ArgumentParser(description=__doc__, formatter_class=RawTextHelpFormatter) parser.add_argument(\"--ip\", \"-i\", required=True, help=\"OME Appliance", "=\", create_resp.text) elif create_resp.status_code == 400: print(\"Failed group creation ...See", "retrieve group list from %s\" % ip_address) else: print(\"Unable to", "in writing, software # distributed under the License is distributed", "group_payload = {\"GroupModel\": { \"Name\": group_name, \"Description\": \"\", \"MembershipTypeId\": 12,", "\"\", \"MembershipTypeId\": 12, \"ParentId\": int(group_id)} } create_url = 'https://%s/api/GroupService/Actions/GroupService.CreateGroup' %", "required=True, help=\"Password for OME Appliance\") parser.add_argument(\"--groupname\", \"-g\", required=True, help=\"A valid", "\"ParentId\": int(group_id)} } create_url = 'https://%s/api/GroupService/Actions/GroupService.CreateGroup' % ip_address create_resp =", "API to create a new static group # # _author_", "...See error info below\") print(json.dumps(create_resp.json(), indent=4, sort_keys=False)) else: print(\"Unable to", 
"= response.json() if json_data['@odata.count'] > 0: # Technically there should", "> 0: # Technically there should be only one result", "% ip_address create_resp = requests.post(create_url, headers=headers, verify=False, data=json.dumps(group_payload)) if create_resp.status_code", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "create a new static group # # _author_ = <NAME>", "entered are not stored to disk. EXAMPLE: python create_static_group.py --ip", "# You may obtain a copy of the License at", "EMC Corporation # # Licensed under the Apache License, Version", "responsible for adding devices to the group once the group", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "ip_address) except Exception as error: print(\"Unexpected error:\", str(error)) if __name__", "Authentication Note that the credentials entered are not stored to", "create_url = 'https://%s/api/GroupService/Actions/GroupService.CreateGroup' % ip_address create_resp = requests.post(create_url, headers=headers, verify=False,", "parser.add_argument(\"--password\", \"-p\", required=True, help=\"Password for OME Appliance\") parser.add_argument(\"--groupname\", \"-g\", required=True,", "the License for the specific language governing permissions and #", "the OME REST API to create a new static group.", "Apache License, Version 2.0 (the \"License\"); # you may not", "required=True, help=\"A valid name for the group\") args = parser.parse_args()", "session_url = 'https://%s/api/SessionService/Sessions' % ip_address group_url = \"https://%s/api/GroupService/Groups?$filter=Name eq 'Static", "credentials entered are not stored to disk. EXAMPLE: python create_static_group.py", "either express or implied. 
# See the License for the", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "This script exercises the OME REST API to create a", "script exercises the OME REST API to create a new", "except Exception as error: print(\"Unexpected error:\", str(error)) if __name__ ==", "API to create a new static group. The user is", "--groupname \"Random Test Group\" \"\"\" import json import argparse from", "% ip_address group_url = \"https://%s/api/GroupService/Groups?$filter=Name eq 'Static Groups'\" % ip_address", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "# # Copyright (c) 2020 Dell EMC Corporation # #", "IP\") parser.add_argument(\"--user\", \"-u\", required=False, help=\"Username for OME Appliance\", default=\"admin\") parser.add_argument(\"--password\",", "group creation ...See error info below\") print(json.dumps(create_resp.json(), indent=4, sort_keys=False)) else:", "help=\"Password for OME Appliance\") parser.add_argument(\"--groupname\", \"-g\", required=True, help=\"A valid name", "group. The user is responsible for adding devices to the", "that the credentials entered are not stored to disk. 
EXAMPLE:", "Appliance IP\") parser.add_argument(\"--user\", \"-u\", required=False, help=\"Username for OME Appliance\", default=\"admin\")", "--password <<PASSWORD>> --groupname \"Random Test Group\" \"\"\" import json import", "\"License\"); # you may not use this file except in", "DESCRIPTION: This script exercises the OME REST API to create", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "2020 Dell EMC Corporation # # Licensed under the Apache", "import urllib3 import requests def create_static_group(ip_address, user_name, password, group_name): \"\"\"", "'Static Groups'\" % ip_address headers = {'content-type': 'application/json'} user_details =", "created : ID =\", create_resp.text) elif create_resp.status_code == 400: print(\"Failed", "# distributed under the License is distributed on an \"AS", "requests.post(create_url, headers=headers, verify=False, data=json.dumps(group_payload)) if create_resp.status_code == 200: print(\"New group", "# Unless required by applicable law or agreed to in", "group_id = json_data['value'][0]['Id'] group_payload = {\"GroupModel\": { \"Name\": group_name, \"Description\":", "headers = {'content-type': 'application/json'} user_details = {'UserName': user_name, 'Password': password,", "import argparse from argparse import RawTextHelpFormatter import urllib3 import requests", "script using OME API to create a new static group", "urllib3 import requests def create_static_group(ip_address, user_name, password, group_name): \"\"\" Authenticate", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "== 200: print(\"New group created : ID =\", create_resp.text) elif", "print(\"New group created : ID =\", create_resp.text) elif create_resp.status_code ==", "to disk. 
EXAMPLE: python create_static_group.py --ip <xx> --user <username> --password", "You may obtain a copy of the License at #", "filter group_id = json_data['value'][0]['Id'] group_payload = {\"GroupModel\": { \"Name\": group_name,", "parser.add_argument(\"--user\", \"-u\", required=False, help=\"Username for OME Appliance\", default=\"admin\") parser.add_argument(\"--password\", \"-p\",", "Corporation # # Licensed under the Apache License, Version 2.0", "--user <username> --password <<PASSWORD>> --groupname \"Random Test Group\" \"\"\" import", "the Apache License, Version 2.0 (the \"License\"); # you may", "create a new static group. The user is responsible for", "# Python script using OME API to create a new", "over Basic Authentication Note that the credentials entered are not", "password, group_name): \"\"\" Authenticate with OME and enumerate groups \"\"\"", "= {'content-type': 'application/json'} user_details = {'UserName': user_name, 'Password': password, 'SessionType':" ]
[ "# random samples for evaluation on training dataset if len(val_ds)", "train_ds = TransformedDataset(train_ds, transform_fn=train_transforms) val_ds = TransformedDataset(val_ds, transform_fn=val_transforms) train_eval_ds =", "'distributed' train_sampler = data_dist.DistributedSampler(train_ds) if isinstance(val_sampler, str): assert val_sampler ==", "assert train_sampler == 'distributed' train_sampler = data_dist.DistributedSampler(train_ds) if isinstance(val_sampler, str):", "if limit_val_num_samples is not None: val_indices = np.random.permutation(len(val_ds))[:limit_val_num_samples] val_ds =", "= get_val_dataset(root_path) if with_sbd is not None: sbd_train_ds = get_train_noval_sbdataset(with_sbd)", "drop_last=False) train_eval_loader = DataLoader(train_eval_ds, shuffle=False, sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False)", "val_sampler = data_dist.DistributedSampler(val_ds, shuffle=False) train_loader = DataLoader(train_ds, shuffle=train_sampler is None,", "num_workers: int = 8, val_batch_size: Optional[int] = None, pin_memory: bool", "limit_val_num_samples is not None: val_indices = np.random.permutation(len(val_ds))[:limit_val_num_samples] val_ds = Subset(val_ds,", "is not None: sbd_train_ds = get_train_noval_sbdataset(with_sbd) train_ds = ConcatDataset([train_ds, sbd_train_ds])", "typing import Callable, Optional, Tuple, Union import numpy as np", "samples for evaluation on training dataset if len(val_ds) < len(train_ds):", "import get_train_dataset, get_val_dataset, TransformedDataset, get_train_noval_sbdataset def get_train_val_loaders(root_path: str, train_transforms: Callable,", "import Subset, ConcatDataset import torch.utils.data.distributed as data_dist from dataflow.datasets import", "== 'distributed' val_sampler = data_dist.DistributedSampler(val_ds, shuffle=False) train_loader = DataLoader(train_ds, shuffle=train_sampler", "is not None: train_indices = 
np.random.permutation(len(train_ds))[:limit_train_num_samples] train_ds = Subset(train_ds, train_indices)", "None: val_indices = np.random.permutation(len(val_ds))[:limit_val_num_samples] val_ds = Subset(val_ds, val_indices) # random", "TransformedDataset(dataset, transform_fn=transforms) loader = DataLoader(dataset, shuffle=False, batch_size=batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False)", "on training dataset if len(val_ds) < len(train_ds): train_eval_indices = np.random.permutation(len(train_ds))[:len(val_ds)]", "= None, train_sampler: Optional[Union[Sampler, str]] = None, val_sampler: Optional[Union[Sampler, str]]", "None: sbd_train_ds = get_train_noval_sbdataset(with_sbd) train_ds = ConcatDataset([train_ds, sbd_train_ds]) if random_seed", "from torch.utils.data import DataLoader, Sampler from torch.utils.data.dataset import Subset, ConcatDataset", "str, train_transforms: Callable, val_transforms: Callable, batch_size: int = 16, num_workers:", "-> Tuple[DataLoader, DataLoader, DataLoader]: train_ds = get_train_dataset(root_path) val_ds = get_val_dataset(root_path)", "= data_dist.DistributedSampler(val_ds, shuffle=False) train_loader = DataLoader(train_ds, shuffle=train_sampler is None, batch_size=batch_size,", "('train', 'test'), \"Mode should be 'train' or 'test'\" get_dataset_fn =", "if limit_train_num_samples is not None: train_indices = np.random.permutation(len(train_ds))[:limit_train_num_samples] train_ds =", "mode: str, transforms: Callable, batch_size: int = 16, num_workers: int", "8, pin_memory: bool = True, limit_num_samples: Optional[int] = None) ->", "= 8, pin_memory: bool = True, limit_num_samples: Optional[int] = None)", "return_meta=True) if limit_num_samples is not None: indices = np.random.permutation(len(dataset))[:limit_num_samples] dataset", "sbd_train_ds]) if random_seed is not None: np.random.seed(random_seed) if limit_train_num_samples is", "not None: train_indices = 
np.random.permutation(len(train_ds))[:limit_train_num_samples] train_ds = Subset(train_ds, train_indices) if", "batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) return train_loader, val_loader, train_eval_loader def get_inference_dataloader(root_path:", "dataflow.datasets import get_train_dataset, get_val_dataset, TransformedDataset, get_train_noval_sbdataset def get_train_val_loaders(root_path: str, train_transforms:", "= None, limit_train_num_samples: Optional[int] = None, limit_val_num_samples: Optional[int] = None)", "data_dist from dataflow.datasets import get_train_dataset, get_val_dataset, TransformedDataset, get_train_noval_sbdataset def get_train_val_loaders(root_path:", "train_eval_ds = TransformedDataset(train_eval_ds, transform_fn=val_transforms) if isinstance(train_sampler, str): assert train_sampler ==", "= DataLoader(train_ds, shuffle=train_sampler is None, batch_size=batch_size, num_workers=num_workers, sampler=train_sampler, pin_memory=pin_memory, drop_last=True)", "random_seed: Optional[int] = None, train_sampler: Optional[Union[Sampler, str]] = None, val_sampler:", "train_transforms: Callable, val_transforms: Callable, batch_size: int = 16, num_workers: int", "import DataLoader, Sampler from torch.utils.data.dataset import Subset, ConcatDataset import torch.utils.data.distributed", "train_ds = get_train_dataset(root_path) val_ds = get_val_dataset(root_path) if with_sbd is not", "get_inference_dataloader(root_path: str, mode: str, transforms: Callable, batch_size: int = 16,", "< len(train_ds): train_eval_indices = np.random.permutation(len(train_ds))[:len(val_ds)] train_eval_ds = Subset(train_ds, train_eval_indices) else:", "= np.random.permutation(len(dataset))[:limit_num_samples] dataset = Subset(dataset, indices) dataset = TransformedDataset(dataset, transform_fn=transforms)", "torch.utils.data.dataset import Subset, ConcatDataset import torch.utils.data.distributed as data_dist from 
dataflow.datasets", "if isinstance(train_sampler, str): assert train_sampler == 'distributed' train_sampler = data_dist.DistributedSampler(train_ds)", "drop_last=True) val_batch_size = batch_size * 4 if val_batch_size is None", "= get_train_noval_sbdataset(with_sbd) train_ds = ConcatDataset([train_ds, sbd_train_ds]) if random_seed is not", "= Subset(dataset, indices) dataset = TransformedDataset(dataset, transform_fn=transforms) loader = DataLoader(dataset,", "def get_train_val_loaders(root_path: str, train_transforms: Callable, val_transforms: Callable, batch_size: int =", "int = 8, pin_memory: bool = True, limit_num_samples: Optional[int] =", "= TransformedDataset(dataset, transform_fn=transforms) loader = DataLoader(dataset, shuffle=False, batch_size=batch_size, num_workers=num_workers, pin_memory=pin_memory,", "from typing import Callable, Optional, Tuple, Union import numpy as", "len(val_ds) < len(train_ds): train_eval_indices = np.random.permutation(len(train_ds))[:len(val_ds)] train_eval_ds = Subset(train_ds, train_eval_indices)", "None, train_sampler: Optional[Union[Sampler, str]] = None, val_sampler: Optional[Union[Sampler, str]] =", "should be 'train' or 'test'\" get_dataset_fn = get_train_dataset if mode", "Callable, val_transforms: Callable, batch_size: int = 16, num_workers: int =", "np.random.seed(random_seed) if limit_train_num_samples is not None: train_indices = np.random.permutation(len(train_ds))[:limit_train_num_samples] train_ds", "= Subset(val_ds, val_indices) # random samples for evaluation on training", "Optional[Union[Sampler, str]] = None, with_sbd: Optional[str] = None, limit_train_num_samples: Optional[int]", "train_loader = DataLoader(train_ds, shuffle=train_sampler is None, batch_size=batch_size, num_workers=num_workers, sampler=train_sampler, pin_memory=pin_memory,", "'test'), \"Mode should be 'train' or 'test'\" get_dataset_fn = get_train_dataset", "dataset = TransformedDataset(dataset, transform_fn=transforms) loader = 
DataLoader(dataset, shuffle=False, batch_size=batch_size, num_workers=num_workers,", "val_batch_size val_loader = DataLoader(val_ds, shuffle=False, sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False)", "TransformedDataset(val_ds, transform_fn=val_transforms) train_eval_ds = TransformedDataset(train_eval_ds, transform_fn=val_transforms) if isinstance(train_sampler, str): assert", "Subset, ConcatDataset import torch.utils.data.distributed as data_dist from dataflow.datasets import get_train_dataset,", "Optional[int] = None, limit_val_num_samples: Optional[int] = None) -> Tuple[DataLoader, DataLoader,", "= np.random.permutation(len(val_ds))[:limit_val_num_samples] val_ds = Subset(val_ds, val_indices) # random samples for", "assert val_sampler == 'distributed' val_sampler = data_dist.DistributedSampler(val_ds, shuffle=False) train_loader =", "limit_train_num_samples: Optional[int] = None, limit_val_num_samples: Optional[int] = None) -> Tuple[DataLoader,", "Subset(val_ds, val_indices) # random samples for evaluation on training dataset", "num_workers=num_workers, pin_memory=pin_memory, drop_last=False) train_eval_loader = DataLoader(train_eval_ds, shuffle=False, sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers,", "True, limit_num_samples: Optional[int] = None) -> DataLoader: assert mode in", "* 4 if val_batch_size is None else val_batch_size val_loader =", "get_train_noval_sbdataset def get_train_val_loaders(root_path: str, train_transforms: Callable, val_transforms: Callable, batch_size: int", "pin_memory: bool = True, random_seed: Optional[int] = None, train_sampler: Optional[Union[Sampler,", "get_val_dataset(root_path) if with_sbd is not None: sbd_train_ds = get_train_noval_sbdataset(with_sbd) train_ds", "sampler=train_sampler, pin_memory=pin_memory, drop_last=True) val_batch_size = batch_size * 4 if val_batch_size", "if limit_num_samples is not None: indices = 
np.random.permutation(len(dataset))[:limit_num_samples] dataset =", "batch_size: int = 16, num_workers: int = 8, val_batch_size: Optional[int]", "import torch.utils.data.distributed as data_dist from dataflow.datasets import get_train_dataset, get_val_dataset, TransformedDataset,", "dataset if len(val_ds) < len(train_ds): train_eval_indices = np.random.permutation(len(train_ds))[:len(val_ds)] train_eval_ds =", "transform_fn=train_transforms) val_ds = TransformedDataset(val_ds, transform_fn=val_transforms) train_eval_ds = TransformedDataset(train_eval_ds, transform_fn=val_transforms) if", "return train_loader, val_loader, train_eval_loader def get_inference_dataloader(root_path: str, mode: str, transforms:", "transform_fn=val_transforms) if isinstance(train_sampler, str): assert train_sampler == 'distributed' train_sampler =", "val_transforms: Callable, batch_size: int = 16, num_workers: int = 8,", "pin_memory=pin_memory, drop_last=False) train_eval_loader = DataLoader(train_eval_ds, shuffle=False, sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory,", "transform_fn=transforms) loader = DataLoader(dataset, shuffle=False, batch_size=batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) return", "-> DataLoader: assert mode in ('train', 'test'), \"Mode should be", "isinstance(val_sampler, str): assert val_sampler == 'distributed' val_sampler = data_dist.DistributedSampler(val_ds, shuffle=False)", "if isinstance(val_sampler, str): assert val_sampler == 'distributed' val_sampler = data_dist.DistributedSampler(val_ds,", "None, val_sampler: Optional[Union[Sampler, str]] = None, with_sbd: Optional[str] = None,", "train_ds = Subset(train_ds, train_indices) if limit_val_num_samples is not None: val_indices", "batch_size * 4 if val_batch_size is None else val_batch_size val_loader", "= data_dist.DistributedSampler(train_ds) if isinstance(val_sampler, str): assert val_sampler == 'distributed' val_sampler", 
"get_val_dataset, TransformedDataset, get_train_noval_sbdataset def get_train_val_loaders(root_path: str, train_transforms: Callable, val_transforms: Callable,", "val_sampler: Optional[Union[Sampler, str]] = None, with_sbd: Optional[str] = None, limit_train_num_samples:", "= True, random_seed: Optional[int] = None, train_sampler: Optional[Union[Sampler, str]] =", "Optional[Union[Sampler, str]] = None, val_sampler: Optional[Union[Sampler, str]] = None, with_sbd:", "= Subset(train_ds, train_indices) if limit_val_num_samples is not None: val_indices =", "= None, val_sampler: Optional[Union[Sampler, str]] = None, with_sbd: Optional[str] =", "= ConcatDataset([train_ds, sbd_train_ds]) if random_seed is not None: np.random.seed(random_seed) if", "evaluation on training dataset if len(val_ds) < len(train_ds): train_eval_indices =", "train_sampler == 'distributed' train_sampler = data_dist.DistributedSampler(train_ds) if isinstance(val_sampler, str): assert", "Optional[str] = None, limit_train_num_samples: Optional[int] = None, limit_val_num_samples: Optional[int] =", "sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) train_eval_loader = DataLoader(train_eval_ds, shuffle=False, sampler=val_sampler,", "= DataLoader(train_eval_ds, shuffle=False, sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) return train_loader,", "= 16, num_workers: int = 8, pin_memory: bool = True,", "get_dataset_fn = get_train_dataset if mode == \"train\" else get_val_dataset dataset", "None else val_batch_size val_loader = DataLoader(val_ds, shuffle=False, sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers,", "4 if val_batch_size is None else val_batch_size val_loader = DataLoader(val_ds,", "is not None: indices = np.random.permutation(len(dataset))[:limit_num_samples] dataset = Subset(dataset, indices)", "dataset = Subset(dataset, indices) dataset = 
TransformedDataset(dataset, transform_fn=transforms) loader =", "training dataset if len(val_ds) < len(train_ds): train_eval_indices = np.random.permutation(len(train_ds))[:len(val_ds)] train_eval_ds", "= TransformedDataset(val_ds, transform_fn=val_transforms) train_eval_ds = TransformedDataset(train_eval_ds, transform_fn=val_transforms) if isinstance(train_sampler, str):", "val_sampler == 'distributed' val_sampler = data_dist.DistributedSampler(val_ds, shuffle=False) train_loader = DataLoader(train_ds,", "mode in ('train', 'test'), \"Mode should be 'train' or 'test'\"", "random samples for evaluation on training dataset if len(val_ds) <", "drop_last=False) return train_loader, val_loader, train_eval_loader def get_inference_dataloader(root_path: str, mode: str,", "= 8, val_batch_size: Optional[int] = None, pin_memory: bool = True,", "Optional[int] = None, train_sampler: Optional[Union[Sampler, str]] = None, val_sampler: Optional[Union[Sampler,", "train_sampler = data_dist.DistributedSampler(train_ds) if isinstance(val_sampler, str): assert val_sampler == 'distributed'", "train_eval_loader def get_inference_dataloader(root_path: str, mode: str, transforms: Callable, batch_size: int", "train_indices = np.random.permutation(len(train_ds))[:limit_train_num_samples] train_ds = Subset(train_ds, train_indices) if limit_val_num_samples is", "else: train_eval_ds = train_ds train_ds = TransformedDataset(train_ds, transform_fn=train_transforms) val_ds =", "Optional[int] = None, pin_memory: bool = True, random_seed: Optional[int] =", "in ('train', 'test'), \"Mode should be 'train' or 'test'\" get_dataset_fn", "= None, with_sbd: Optional[str] = None, limit_train_num_samples: Optional[int] = None,", "numpy as np from torch.utils.data import DataLoader, Sampler from torch.utils.data.dataset", "Subset(train_ds, train_eval_indices) else: train_eval_ds = train_ds train_ds = TransformedDataset(train_ds, transform_fn=train_transforms)", "DataLoader(train_eval_ds, shuffle=False, 
sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) return train_loader, val_loader,", "is None else val_batch_size val_loader = DataLoader(val_ds, shuffle=False, sampler=val_sampler, batch_size=val_batch_size,", "from dataflow.datasets import get_train_dataset, get_val_dataset, TransformedDataset, get_train_noval_sbdataset def get_train_val_loaders(root_path: str,", "is not None: np.random.seed(random_seed) if limit_train_num_samples is not None: train_indices", "if with_sbd is not None: sbd_train_ds = get_train_noval_sbdataset(with_sbd) train_ds =", "isinstance(train_sampler, str): assert train_sampler == 'distributed' train_sampler = data_dist.DistributedSampler(train_ds) if", "str): assert val_sampler == 'distributed' val_sampler = data_dist.DistributedSampler(val_ds, shuffle=False) train_loader", "== \"train\" else get_val_dataset dataset = get_dataset_fn(root_path, return_meta=True) if limit_num_samples", "train_loader, val_loader, train_eval_loader def get_inference_dataloader(root_path: str, mode: str, transforms: Callable,", "shuffle=False, sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) return train_loader, val_loader, train_eval_loader", "np.random.permutation(len(dataset))[:limit_num_samples] dataset = Subset(dataset, indices) dataset = TransformedDataset(dataset, transform_fn=transforms) loader", "True, random_seed: Optional[int] = None, train_sampler: Optional[Union[Sampler, str]] = None,", "batch_size=batch_size, num_workers=num_workers, sampler=train_sampler, pin_memory=pin_memory, drop_last=True) val_batch_size = batch_size * 4", "np from torch.utils.data import DataLoader, Sampler from torch.utils.data.dataset import Subset,", "DataLoader: assert mode in ('train', 'test'), \"Mode should be 'train'", "DataLoader, Sampler from torch.utils.data.dataset import Subset, ConcatDataset import torch.utils.data.distributed as", 
"dataset = get_dataset_fn(root_path, return_meta=True) if limit_num_samples is not None: indices", "val_ds = TransformedDataset(val_ds, transform_fn=val_transforms) train_eval_ds = TransformedDataset(train_eval_ds, transform_fn=val_transforms) if isinstance(train_sampler,", "== 'distributed' train_sampler = data_dist.DistributedSampler(train_ds) if isinstance(val_sampler, str): assert val_sampler", "val_indices) # random samples for evaluation on training dataset if", "shuffle=False) train_loader = DataLoader(train_ds, shuffle=train_sampler is None, batch_size=batch_size, num_workers=num_workers, sampler=train_sampler,", "None, limit_train_num_samples: Optional[int] = None, limit_val_num_samples: Optional[int] = None) ->", "train_eval_ds = Subset(train_ds, train_eval_indices) else: train_eval_ds = train_ds train_ds =", "if len(val_ds) < len(train_ds): train_eval_indices = np.random.permutation(len(train_ds))[:len(val_ds)] train_eval_ds = Subset(train_ds,", "= get_dataset_fn(root_path, return_meta=True) if limit_num_samples is not None: indices =", "= None, limit_val_num_samples: Optional[int] = None) -> Tuple[DataLoader, DataLoader, DataLoader]:", "pin_memory: bool = True, limit_num_samples: Optional[int] = None) -> DataLoader:", "= Subset(train_ds, train_eval_indices) else: train_eval_ds = train_ds train_ds = TransformedDataset(train_ds,", "None: indices = np.random.permutation(len(dataset))[:limit_num_samples] dataset = Subset(dataset, indices) dataset =", "val_loader, train_eval_loader def get_inference_dataloader(root_path: str, mode: str, transforms: Callable, batch_size:", "'train' or 'test'\" get_dataset_fn = get_train_dataset if mode == \"train\"", "for evaluation on training dataset if len(val_ds) < len(train_ds): train_eval_indices", "len(train_ds): train_eval_indices = np.random.permutation(len(train_ds))[:len(val_ds)] train_eval_ds = Subset(train_ds, train_eval_indices) else: train_eval_ds", "bool = True, random_seed: Optional[int] = None, 
train_sampler: Optional[Union[Sampler, str]]", "is not None: val_indices = np.random.permutation(len(val_ds))[:limit_val_num_samples] val_ds = Subset(val_ds, val_indices)", "np.random.permutation(len(train_ds))[:len(val_ds)] train_eval_ds = Subset(train_ds, train_eval_indices) else: train_eval_ds = train_ds train_ds", "= train_ds train_ds = TransformedDataset(train_ds, transform_fn=train_transforms) val_ds = TransformedDataset(val_ds, transform_fn=val_transforms)", "shuffle=False, sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) train_eval_loader = DataLoader(train_eval_ds, shuffle=False,", "torch.utils.data.distributed as data_dist from dataflow.datasets import get_train_dataset, get_val_dataset, TransformedDataset, get_train_noval_sbdataset", "None) -> DataLoader: assert mode in ('train', 'test'), \"Mode should", "data_dist.DistributedSampler(train_ds) if isinstance(val_sampler, str): assert val_sampler == 'distributed' val_sampler =", "train_indices) if limit_val_num_samples is not None: val_indices = np.random.permutation(len(val_ds))[:limit_val_num_samples] val_ds", "transforms: Callable, batch_size: int = 16, num_workers: int = 8,", "Optional, Tuple, Union import numpy as np from torch.utils.data import", "Callable, Optional, Tuple, Union import numpy as np from torch.utils.data", "Callable, batch_size: int = 16, num_workers: int = 8, pin_memory:", "= get_train_dataset if mode == \"train\" else get_val_dataset dataset =", "if random_seed is not None: np.random.seed(random_seed) if limit_train_num_samples is not", "not None: np.random.seed(random_seed) if limit_train_num_samples is not None: train_indices =", "train_eval_indices) else: train_eval_ds = train_ds train_ds = TransformedDataset(train_ds, transform_fn=train_transforms) val_ds", "num_workers: int = 8, pin_memory: bool = True, limit_num_samples: Optional[int]", "is None, batch_size=batch_size, num_workers=num_workers, 
sampler=train_sampler, pin_memory=pin_memory, drop_last=True) val_batch_size = batch_size", "= None, pin_memory: bool = True, random_seed: Optional[int] = None,", "train_eval_indices = np.random.permutation(len(train_ds))[:len(val_ds)] train_eval_ds = Subset(train_ds, train_eval_indices) else: train_eval_ds =", "limit_train_num_samples is not None: train_indices = np.random.permutation(len(train_ds))[:limit_train_num_samples] train_ds = Subset(train_ds,", "np.random.permutation(len(train_ds))[:limit_train_num_samples] train_ds = Subset(train_ds, train_indices) if limit_val_num_samples is not None:", "\"train\" else get_val_dataset dataset = get_dataset_fn(root_path, return_meta=True) if limit_num_samples is", "sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) return train_loader, val_loader, train_eval_loader def", "not None: indices = np.random.permutation(len(dataset))[:limit_num_samples] dataset = Subset(dataset, indices) dataset", "not None: sbd_train_ds = get_train_noval_sbdataset(with_sbd) train_ds = ConcatDataset([train_ds, sbd_train_ds]) if", "None, batch_size=batch_size, num_workers=num_workers, sampler=train_sampler, pin_memory=pin_memory, drop_last=True) val_batch_size = batch_size *", "ConcatDataset import torch.utils.data.distributed as data_dist from dataflow.datasets import get_train_dataset, get_val_dataset,", "val_batch_size = batch_size * 4 if val_batch_size is None else", "val_loader = DataLoader(val_ds, shuffle=False, sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) train_eval_loader", "Tuple, Union import numpy as np from torch.utils.data import DataLoader,", "str, mode: str, transforms: Callable, batch_size: int = 16, num_workers:", "TransformedDataset(train_eval_ds, transform_fn=val_transforms) if isinstance(train_sampler, str): assert train_sampler == 'distributed' train_sampler", "get_train_dataset if mode == 
\"train\" else get_val_dataset dataset = get_dataset_fn(root_path,", "else val_batch_size val_loader = DataLoader(val_ds, shuffle=False, sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory,", "8, val_batch_size: Optional[int] = None, pin_memory: bool = True, random_seed:", "get_train_dataset(root_path) val_ds = get_val_dataset(root_path) if with_sbd is not None: sbd_train_ds", "= get_train_dataset(root_path) val_ds = get_val_dataset(root_path) if with_sbd is not None:", "\"Mode should be 'train' or 'test'\" get_dataset_fn = get_train_dataset if", "train_ds train_ds = TransformedDataset(train_ds, transform_fn=train_transforms) val_ds = TransformedDataset(val_ds, transform_fn=val_transforms) train_eval_ds", "str]] = None, val_sampler: Optional[Union[Sampler, str]] = None, with_sbd: Optional[str]", "import Callable, Optional, Tuple, Union import numpy as np from", "= True, limit_num_samples: Optional[int] = None) -> DataLoader: assert mode", "get_train_val_loaders(root_path: str, train_transforms: Callable, val_transforms: Callable, batch_size: int = 16,", "val_ds = Subset(val_ds, val_indices) # random samples for evaluation on", "limit_num_samples is not None: indices = np.random.permutation(len(dataset))[:limit_num_samples] dataset = Subset(dataset,", "Callable, batch_size: int = 16, num_workers: int = 8, val_batch_size:", "None: train_indices = np.random.permutation(len(train_ds))[:limit_train_num_samples] train_ds = Subset(train_ds, train_indices) if limit_val_num_samples", "if val_batch_size is None else val_batch_size val_loader = DataLoader(val_ds, shuffle=False,", "limit_val_num_samples: Optional[int] = None) -> Tuple[DataLoader, DataLoader, DataLoader]: train_ds =", "16, num_workers: int = 8, pin_memory: bool = True, limit_num_samples:", "= None) -> DataLoader: assert mode in ('train', 'test'), \"Mode", "import numpy as np from torch.utils.data import DataLoader, Sampler from", "get_val_dataset dataset = 
get_dataset_fn(root_path, return_meta=True) if limit_num_samples is not None:", "train_eval_loader = DataLoader(train_eval_ds, shuffle=False, sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) return", "val_ds = get_val_dataset(root_path) if with_sbd is not None: sbd_train_ds =", "= TransformedDataset(train_eval_ds, transform_fn=val_transforms) if isinstance(train_sampler, str): assert train_sampler == 'distributed'", "train_ds = ConcatDataset([train_ds, sbd_train_ds]) if random_seed is not None: np.random.seed(random_seed)", "DataLoader(train_ds, shuffle=train_sampler is None, batch_size=batch_size, num_workers=num_workers, sampler=train_sampler, pin_memory=pin_memory, drop_last=True) val_batch_size", "shuffle=train_sampler is None, batch_size=batch_size, num_workers=num_workers, sampler=train_sampler, pin_memory=pin_memory, drop_last=True) val_batch_size =", "as np from torch.utils.data import DataLoader, Sampler from torch.utils.data.dataset import", "str, transforms: Callable, batch_size: int = 16, num_workers: int =", "None: np.random.seed(random_seed) if limit_train_num_samples is not None: train_indices = np.random.permutation(len(train_ds))[:limit_train_num_samples]", "'distributed' val_sampler = data_dist.DistributedSampler(val_ds, shuffle=False) train_loader = DataLoader(train_ds, shuffle=train_sampler is", "Optional[int] = None) -> DataLoader: assert mode in ('train', 'test'),", "be 'train' or 'test'\" get_dataset_fn = get_train_dataset if mode ==", "limit_num_samples: Optional[int] = None) -> DataLoader: assert mode in ('train',", "with_sbd: Optional[str] = None, limit_train_num_samples: Optional[int] = None, limit_val_num_samples: Optional[int]", "random_seed is not None: np.random.seed(random_seed) if limit_train_num_samples is not None:", "Subset(train_ds, train_indices) if limit_val_num_samples is not None: val_indices = np.random.permutation(len(val_ds))[:limit_val_num_samples]", 
"TransformedDataset(train_ds, transform_fn=train_transforms) val_ds = TransformedDataset(val_ds, transform_fn=val_transforms) train_eval_ds = TransformedDataset(train_eval_ds, transform_fn=val_transforms)", "pin_memory=pin_memory, drop_last=True) val_batch_size = batch_size * 4 if val_batch_size is", "train_sampler: Optional[Union[Sampler, str]] = None, val_sampler: Optional[Union[Sampler, str]] = None,", "batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) train_eval_loader = DataLoader(train_eval_ds, shuffle=False, sampler=val_sampler, batch_size=val_batch_size,", "batch_size: int = 16, num_workers: int = 8, pin_memory: bool", "str): assert train_sampler == 'distributed' train_sampler = data_dist.DistributedSampler(train_ds) if isinstance(val_sampler,", "Subset(dataset, indices) dataset = TransformedDataset(dataset, transform_fn=transforms) loader = DataLoader(dataset, shuffle=False,", "= 16, num_workers: int = 8, val_batch_size: Optional[int] = None,", "if mode == \"train\" else get_val_dataset dataset = get_dataset_fn(root_path, return_meta=True)", "= None) -> Tuple[DataLoader, DataLoader, DataLoader]: train_ds = get_train_dataset(root_path) val_ds", "<filename>examples/references/segmentation/pascal_voc2012/code/dataflow/dataloaders.py from typing import Callable, Optional, Tuple, Union import numpy", "DataLoader]: train_ds = get_train_dataset(root_path) val_ds = get_val_dataset(root_path) if with_sbd is", "'test'\" get_dataset_fn = get_train_dataset if mode == \"train\" else get_val_dataset", "None, with_sbd: Optional[str] = None, limit_train_num_samples: Optional[int] = None, limit_val_num_samples:", "val_batch_size is None else val_batch_size val_loader = DataLoader(val_ds, shuffle=False, sampler=val_sampler,", "indices) dataset = TransformedDataset(dataset, transform_fn=transforms) loader = DataLoader(dataset, shuffle=False, batch_size=batch_size,", "= TransformedDataset(train_ds, transform_fn=train_transforms) 
val_ds = TransformedDataset(val_ds, transform_fn=val_transforms) train_eval_ds = TransformedDataset(train_eval_ds,", "TransformedDataset, get_train_noval_sbdataset def get_train_val_loaders(root_path: str, train_transforms: Callable, val_transforms: Callable, batch_size:", "as data_dist from dataflow.datasets import get_train_dataset, get_val_dataset, TransformedDataset, get_train_noval_sbdataset def", "Sampler from torch.utils.data.dataset import Subset, ConcatDataset import torch.utils.data.distributed as data_dist", "None, limit_val_num_samples: Optional[int] = None) -> Tuple[DataLoader, DataLoader, DataLoader]: train_ds", "transform_fn=val_transforms) train_eval_ds = TransformedDataset(train_eval_ds, transform_fn=val_transforms) if isinstance(train_sampler, str): assert train_sampler", "assert mode in ('train', 'test'), \"Mode should be 'train' or", "or 'test'\" get_dataset_fn = get_train_dataset if mode == \"train\" else", "else get_val_dataset dataset = get_dataset_fn(root_path, return_meta=True) if limit_num_samples is not", "num_workers=num_workers, sampler=train_sampler, pin_memory=pin_memory, drop_last=True) val_batch_size = batch_size * 4 if", "int = 16, num_workers: int = 8, pin_memory: bool =", "DataLoader, DataLoader]: train_ds = get_train_dataset(root_path) val_ds = get_val_dataset(root_path) if with_sbd", "16, num_workers: int = 8, val_batch_size: Optional[int] = None, pin_memory:", "Optional[int] = None) -> Tuple[DataLoader, DataLoader, DataLoader]: train_ds = get_train_dataset(root_path)", "mode == \"train\" else get_val_dataset dataset = get_dataset_fn(root_path, return_meta=True) if", "def get_inference_dataloader(root_path: str, mode: str, transforms: Callable, batch_size: int =", "int = 16, num_workers: int = 8, val_batch_size: Optional[int] =", "not None: val_indices = np.random.permutation(len(val_ds))[:limit_val_num_samples] val_ds = Subset(val_ds, val_indices) #", "DataLoader(val_ds, shuffle=False, sampler=val_sampler, 
batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) train_eval_loader = DataLoader(train_eval_ds,", "pin_memory=pin_memory, drop_last=False) return train_loader, val_loader, train_eval_loader def get_inference_dataloader(root_path: str, mode:", "with_sbd is not None: sbd_train_ds = get_train_noval_sbdataset(with_sbd) train_ds = ConcatDataset([train_ds,", "ConcatDataset([train_ds, sbd_train_ds]) if random_seed is not None: np.random.seed(random_seed) if limit_train_num_samples", "bool = True, limit_num_samples: Optional[int] = None) -> DataLoader: assert", "None) -> Tuple[DataLoader, DataLoader, DataLoader]: train_ds = get_train_dataset(root_path) val_ds =", "= np.random.permutation(len(train_ds))[:len(val_ds)] train_eval_ds = Subset(train_ds, train_eval_indices) else: train_eval_ds = train_ds", "int = 8, val_batch_size: Optional[int] = None, pin_memory: bool =", "from torch.utils.data.dataset import Subset, ConcatDataset import torch.utils.data.distributed as data_dist from", "get_train_dataset, get_val_dataset, TransformedDataset, get_train_noval_sbdataset def get_train_val_loaders(root_path: str, train_transforms: Callable, val_transforms:", "None, pin_memory: bool = True, random_seed: Optional[int] = None, train_sampler:", "str]] = None, with_sbd: Optional[str] = None, limit_train_num_samples: Optional[int] =", "= np.random.permutation(len(train_ds))[:limit_train_num_samples] train_ds = Subset(train_ds, train_indices) if limit_val_num_samples is not", "= DataLoader(val_ds, shuffle=False, sampler=val_sampler, batch_size=val_batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) train_eval_loader =", "val_indices = np.random.permutation(len(val_ds))[:limit_val_num_samples] val_ds = Subset(val_ds, val_indices) # random samples", "val_batch_size: Optional[int] = None, pin_memory: bool = True, random_seed: Optional[int]", "get_train_noval_sbdataset(with_sbd) train_ds = ConcatDataset([train_ds, 
sbd_train_ds]) if random_seed is not None:", "torch.utils.data import DataLoader, Sampler from torch.utils.data.dataset import Subset, ConcatDataset import", "sbd_train_ds = get_train_noval_sbdataset(with_sbd) train_ds = ConcatDataset([train_ds, sbd_train_ds]) if random_seed is", "indices = np.random.permutation(len(dataset))[:limit_num_samples] dataset = Subset(dataset, indices) dataset = TransformedDataset(dataset,", "get_dataset_fn(root_path, return_meta=True) if limit_num_samples is not None: indices = np.random.permutation(len(dataset))[:limit_num_samples]", "Tuple[DataLoader, DataLoader, DataLoader]: train_ds = get_train_dataset(root_path) val_ds = get_val_dataset(root_path) if", "num_workers=num_workers, pin_memory=pin_memory, drop_last=False) return train_loader, val_loader, train_eval_loader def get_inference_dataloader(root_path: str,", "loader = DataLoader(dataset, shuffle=False, batch_size=batch_size, num_workers=num_workers, pin_memory=pin_memory, drop_last=False) return loader", "train_eval_ds = train_ds train_ds = TransformedDataset(train_ds, transform_fn=train_transforms) val_ds = TransformedDataset(val_ds,", "np.random.permutation(len(val_ds))[:limit_val_num_samples] val_ds = Subset(val_ds, val_indices) # random samples for evaluation", "= batch_size * 4 if val_batch_size is None else val_batch_size", "data_dist.DistributedSampler(val_ds, shuffle=False) train_loader = DataLoader(train_ds, shuffle=train_sampler is None, batch_size=batch_size, num_workers=num_workers,", "Union import numpy as np from torch.utils.data import DataLoader, Sampler" ]
[ "return None def jwt_decode(token: str, verify_expiration=settings.JWT_EXPIRE) -> Dict[str, Any]: return", "Any]) -> Optional[User]: user = User.objects.filter(email=payload[\"email\"], is_active=True).first() user_jwt_token = payload.get(\"token\")", "not is_saleor_token(token): return None payload = jwt_decode(token) return get_user_from_access_payload(payload) def", "settings.JWT_TTL_REFRESH, additional_payload, ) return jwt_encode(payload) def get_token_from_request(request: WSGIRequest) -> Optional[str]:", "or not user: raise jwt.InvalidTokenError( \"Invalid token. Create new one", "import graphene import jwt from django.conf import settings from django.core.handlers.wsgi", "= \"refreshToken\" PERMISSIONS_FIELD = \"permissions\" JWT_SALEOR_OWNER_NAME = \"saleor\" JWT_OWNER_FIELD =", "import Any, Dict, Optional import graphene import jwt from django.conf", "proceed given operation on the Saleor side. The token which", "get_user_from_payload(payload) if user and permissions is not None: token_permissions =", "user def create_access_token_for_app(app: \"App\", user: \"User\"): \"\"\"Create access token for", "Optional[str]: auth_token = request.META.get(SALEOR_AUTH_HEADER) if not auth_token: auth = request.META.get(DEFAULT_AUTH_HEADER,", "except jwt.PyJWTError: return False owner = payload.get(JWT_OWNER_FIELD) if not owner", "the Saleor side. 
The token which can be used by", "generated by Saleor not by plugin.\"\"\" try: payload = jwt.decode(token,", "for perm in token_permissions] user.effective_permissions = get_permissions_from_codenames(token_codenames) user.is_staff = True", "JWT_REFRESH_TYPE = \"refresh\" JWT_THIRDPARTY_ACCESS_TYPE = \"thirdparty\" JWT_REFRESH_TOKEN_COOKIE_NAME = \"refreshToken\" PERMISSIONS_FIELD", "exp_delta: Optional[timedelta], token_owner: str ) -> Dict[str, Any]: utc_now =", "\"is_supplier\": user.is_supplier, } ) if additional_payload: payload.update(additional_payload) return payload def", "user jwt token to proceed given operation on the Saleor", "= \"thirdparty\" JWT_REFRESH_TOKEN_COOKIE_NAME = \"refreshToken\" PERMISSIONS_FIELD = \"permissions\" JWT_SALEOR_OWNER_NAME =", "= payload.get(\"type\") if jwt_type not in [JWT_ACCESS_TYPE, JWT_THIRDPARTY_ACCESS_TYPE]: raise jwt.InvalidTokenError(", "JWT_SALEOR_OWNER_NAME, ) -> Dict[str, Any]: payload = jwt_base_payload(exp_delta, token_owner) payload.update(", "{ \"token\": user.jwt_token_key, \"email\": user.email, \"type\": token_type, \"user_id\": graphene.Node.to_global_id(\"User\", user.id),", "User.objects.filter(email=payload[\"email\"], is_active=True).first() user_jwt_token = payload.get(\"token\") if not user_jwt_token or not", "user: User, additional_payload: Optional[Dict[str, Any]] = None ) -> str:", "\"type\": token_type, \"user_id\": graphene.Node.to_global_id(\"User\", user.id), \"is_staff\": user.is_staff, \"is_supplier\": user.is_supplier, }", "= True if user.effective_permissions else False return user def create_access_token_for_app(app:", "str: payload = jwt_user_payload( user, JWT_REFRESH_TYPE, settings.JWT_TTL_REFRESH, additional_payload, ) return", "user_jwt_token or not user: raise jwt.InvalidTokenError( \"Invalid token. 
Create new", "from django.conf import settings from django.core.handlers.wsgi import WSGIRequest from ..account.models", "settings from django.core.handlers.wsgi import WSGIRequest from ..account.models import User from", "str: payload = jwt_user_payload( user, JWT_ACCESS_TYPE, settings.JWT_TTL_ACCESS, additional_payload ) return", ") return jwt_encode(payload) def create_refresh_token( user: User, additional_payload: Optional[Dict[str, Any]]", "None) user = get_user_from_payload(payload) if user and permissions is not", "Dict[str, Any]: return jwt.decode( token, settings.SECRET_KEY, # type: ignore algorithms=[JWT_ALGORITHM],", "permissions set is the intersection of user permissions and app", "app_id, PERMISSIONS_FIELD: list(app_permission_enums & user_permission_enums), } payload = jwt_user_payload( user,", "str: return jwt.encode( payload, settings.SECRET_KEY, # type: ignore JWT_ALGORITHM, )", "permissions and app permissions. \"\"\" app_permissions = app.permissions.all() app_permission_enums =", "AUTH_HEADER_PREFIXES: auth_token = auth[1] return auth_token def get_user_from_payload(payload: Dict[str, Any])", "-> str: payload = jwt_user_payload( user, JWT_ACCESS_TYPE, settings.JWT_TTL_ACCESS, additional_payload )", "def create_access_token_for_app(app: \"App\", user: \"User\"): \"\"\"Create access token for app.", "None ) -> str: payload = jwt_user_payload( user, JWT_ACCESS_TYPE, settings.JWT_TTL_ACCESS,", "= None ) -> str: payload = jwt_user_payload( user, JWT_REFRESH_TYPE,", "\"\"\"Create access token for app. 
App can use user jwt", "request.META.get(SALEOR_AUTH_HEADER) if not auth_token: auth = request.META.get(DEFAULT_AUTH_HEADER, \"\").split(maxsplit=1) if len(auth)", "verify_expiration=settings.JWT_EXPIRE ) -> Optional[Dict[str, Any]]: try: return jwt_decode(token, verify_expiration=verify_expiration) except", "user, JWT_ACCESS_TYPE, settings.JWT_TTL_ACCESS, additional_payload ) return jwt_encode(payload) def create_refresh_token( user:", "True def get_user_from_access_token(token: str) -> Optional[User]: if not is_saleor_token(token): return", "if additional_payload: payload.update(additional_payload) return payload def jwt_encode(payload: Dict[str, Any]) ->", "one by using tokenCreate mutation.\" ) permissions = payload.get(PERMISSIONS_FIELD, None)", "graphene.Node.to_global_id(\"User\", user.id), \"is_staff\": user.is_staff, \"is_supplier\": user.is_supplier, } ) if additional_payload:", "return payload def jwt_user_payload( user: User, token_type: str, exp_delta: Optional[timedelta],", "token to proceed given operation on the Saleor side. The", "get_user_from_payload(payload: Dict[str, Any]) -> Optional[User]: user = User.objects.filter(email=payload[\"email\"], is_active=True).first() user_jwt_token", "def jwt_decode_with_exception_handler( token: str, verify_expiration=settings.JWT_EXPIRE ) -> Optional[Dict[str, Any]]: try:", "not auth_token: auth = request.META.get(DEFAULT_AUTH_HEADER, \"\").split(maxsplit=1) if len(auth) == 2", "jwt.PyJWTError: return None def jwt_decode(token: str, verify_expiration=settings.JWT_EXPIRE) -> Dict[str, Any]:", "= payload.get(JWT_OWNER_FIELD) if not owner or owner != JWT_SALEOR_OWNER_NAME: return", "return user def is_saleor_token(token: str) -> bool: \"\"\"Confirm that token", "jwt_user_payload( user, JWT_ACCESS_TYPE, settings.JWT_TTL_ACCESS, additional_payload ) return jwt_encode(payload) def create_refresh_token(", "user_jwt_token: raise jwt.InvalidTokenError( \"Invalid token. 
Create new one by using", "# type: ignore algorithms=[JWT_ALGORITHM], options={\"verify_exp\": verify_expiration}, ) def create_token(payload: Dict[str,", "token. Create new one by using tokenCreate mutation.\" ) return", "additional_payload, ) return jwt_encode(payload) def get_token_from_request(request: WSGIRequest) -> Optional[str]: auth_token", "Dict[str, Any]) -> Optional[User]: user = User.objects.filter(email=payload[\"email\"], is_active=True).first() user_jwt_token =", ") permissions = payload.get(PERMISSIONS_FIELD, None) user = get_user_from_payload(payload) if user", "= request.META.get(SALEOR_AUTH_HEADER) if not auth_token: auth = request.META.get(DEFAULT_AUTH_HEADER, \"\").split(maxsplit=1) if", "jwt_type not in [JWT_ACCESS_TYPE, JWT_THIRDPARTY_ACCESS_TYPE]: raise jwt.InvalidTokenError( \"Invalid token. Create", "user.id), \"is_staff\": user.is_staff, \"is_supplier\": user.is_supplier, } ) if additional_payload: payload.update(additional_payload)", "\"app\": app_id, PERMISSIONS_FIELD: list(app_permission_enums & user_permission_enums), } payload = jwt_user_payload(", "token_owner: str ) -> Dict[str, Any]: utc_now = datetime.utcnow() payload", "payload = jwt_user_payload( user, JWT_ACCESS_TYPE, settings.JWT_TTL_ACCESS, additional_payload ) return jwt_encode(payload)", "..app.models import App from .permissions import ( get_permission_names, get_permissions_from_codenames, get_permissions_from_names,", "given operation on the Saleor side. The token which can", "App has additional field defining the permissions assigned to it.", "app_permission_enums = get_permission_names(app_permissions) permissions = user.effective_permissions user_permission_enums = get_permission_names(permissions) app_id", "[JWT_ACCESS_TYPE, JWT_THIRDPARTY_ACCESS_TYPE]: raise jwt.InvalidTokenError( \"Invalid token. 
Create new one by", "= [perm.codename for perm in token_permissions] user.effective_permissions = get_permissions_from_codenames(token_codenames) user.is_staff", "App can use user jwt token to proceed given operation", "permissions = payload.get(PERMISSIONS_FIELD, None) user = get_user_from_payload(payload) if user and", "False owner = payload.get(JWT_OWNER_FIELD) if not owner or owner !=", "WSGIRequest from ..account.models import User from ..app.models import App from", "from .permissions import ( get_permission_names, get_permissions_from_codenames, get_permissions_from_names, ) JWT_ALGORITHM =", "= \"HTTP_AUTHORIZATION_BEARER\" DEFAULT_AUTH_HEADER = \"HTTP_AUTHORIZATION\" AUTH_HEADER_PREFIXES = [\"JWT\", \"BEARER\"] JWT_ACCESS_TYPE", "side. The token which can be used by App has", ".permissions import ( get_permission_names, get_permissions_from_codenames, get_permissions_from_names, ) JWT_ALGORITHM = \"HS256\"", "= get_user_from_payload(payload) if user and permissions is not None: token_permissions", "payload = jwt_user_payload( user, JWT_THIRDPARTY_ACCESS_TYPE, exp_delta=settings.JWT_TTL_APP_ACCESS, additional_payload=additional_payload, ) return jwt_encode(payload)", "create_access_token_for_app(app: \"App\", user: \"User\"): \"\"\"Create access token for app. App", "if exp_delta: payload[\"exp\"] = utc_now + exp_delta return payload def", "user.jwt_token_key, \"email\": user.email, \"type\": token_type, \"user_id\": graphene.Node.to_global_id(\"User\", user.id), \"is_staff\": user.is_staff,", "in token_permissions] user.effective_permissions = get_permissions_from_codenames(token_codenames) user.is_staff = True if user.effective_permissions", "permissions assigned to it. 
The permissions set is the intersection", "return get_user_from_access_payload(payload) def get_user_from_access_payload(payload: dict) -> Optional[User]: jwt_type = payload.get(\"type\")", "plugin.\"\"\" try: payload = jwt.decode(token, options={\"verify_signature\": False}) except jwt.PyJWTError: return", "user.is_staff, \"is_supplier\": user.is_supplier, } ) if additional_payload: payload.update(additional_payload) return payload", "create_access_token( user: User, additional_payload: Optional[Dict[str, Any]] = None ) ->", "payload.get(\"token\") if not user_jwt_token or not user: raise jwt.InvalidTokenError( \"Invalid", "user.jwt_token_key != user_jwt_token: raise jwt.InvalidTokenError( \"Invalid token. Create new one", "graphene.Node.to_global_id(\"App\", app.id) additional_payload = { \"app\": app_id, PERMISSIONS_FIELD: list(app_permission_enums &", "def jwt_decode(token: str, verify_expiration=settings.JWT_EXPIRE) -> Dict[str, Any]: return jwt.decode( token,", "permissions. \"\"\" app_permissions = app.permissions.all() app_permission_enums = get_permission_names(app_permissions) permissions =", "jwt_user_payload( user, JWT_REFRESH_TYPE, settings.JWT_TTL_REFRESH, additional_payload, ) return jwt_encode(payload) def get_token_from_request(request:", "payload, settings.SECRET_KEY, # type: ignore JWT_ALGORITHM, ) def jwt_decode_with_exception_handler( token:", "Dict[str, Any], exp_delta: timedelta) -> str: payload.update(jwt_base_payload(exp_delta, token_owner=JWT_SALEOR_OWNER_NAME)) return jwt_encode(payload)", "if not user_jwt_token or not user: raise jwt.InvalidTokenError( \"Invalid token.", "str, verify_expiration=settings.JWT_EXPIRE) -> Dict[str, Any]: return jwt.decode( token, settings.SECRET_KEY, #", ") return user def is_saleor_token(token: str) -> bool: \"\"\"Confirm that", "def get_user_from_payload(payload: Dict[str, Any]) -> Optional[User]: user = User.objects.filter(email=payload[\"email\"], is_active=True).first()", "return jwt.decode( token, 
settings.SECRET_KEY, # type: ignore algorithms=[JWT_ALGORITHM], options={\"verify_exp\": verify_expiration},", "def get_user_from_access_token(token: str) -> Optional[User]: if not is_saleor_token(token): return None", "get_permissions_from_codenames(token_codenames) user.is_staff = True if user.effective_permissions else False return user", "-> Dict[str, Any]: payload = jwt_base_payload(exp_delta, token_owner) payload.update( { \"token\":", "return payload def jwt_encode(payload: Dict[str, Any]) -> str: return jwt.encode(", "JWT_OWNER_FIELD = \"owner\" def jwt_base_payload( exp_delta: Optional[timedelta], token_owner: str )", "auth[1] return auth_token def get_user_from_payload(payload: Dict[str, Any]) -> Optional[User]: user", "mutation.\" ) return user def is_saleor_token(token: str) -> bool: \"\"\"Confirm", "Create new one by using tokenCreate mutation.\" ) permissions =", "bool: \"\"\"Confirm that token was generated by Saleor not by", "payload def jwt_user_payload( user: User, token_type: str, exp_delta: Optional[timedelta], additional_payload:", "mutation.\" ) permissions = payload.get(PERMISSIONS_FIELD, None) user = get_user_from_payload(payload) if", "request.META.get(DEFAULT_AUTH_HEADER, \"\").split(maxsplit=1) if len(auth) == 2 and auth[0].upper() in AUTH_HEADER_PREFIXES:", "utc_now, JWT_OWNER_FIELD: token_owner} if exp_delta: payload[\"exp\"] = utc_now + exp_delta", "import ( get_permission_names, get_permissions_from_codenames, get_permissions_from_names, ) JWT_ALGORITHM = \"HS256\" SALEOR_AUTH_HEADER", "App from .permissions import ( get_permission_names, get_permissions_from_codenames, get_permissions_from_names, ) JWT_ALGORITHM", "and app permissions. 
\"\"\" app_permissions = app.permissions.all() app_permission_enums = get_permission_names(app_permissions)", "str) -> bool: \"\"\"Confirm that token was generated by Saleor", "app.id) additional_payload = { \"app\": app_id, PERMISSIONS_FIELD: list(app_permission_enums & user_permission_enums),", "payload.get(JWT_OWNER_FIELD) if not owner or owner != JWT_SALEOR_OWNER_NAME: return False", "utc_now = datetime.utcnow() payload = {\"iat\": utc_now, JWT_OWNER_FIELD: token_owner} if", "jwt_base_payload(exp_delta, token_owner) payload.update( { \"token\": user.jwt_token_key, \"email\": user.email, \"type\": token_type,", "if not auth_token: auth = request.META.get(DEFAULT_AUTH_HEADER, \"\").split(maxsplit=1) if len(auth) ==", "not in [JWT_ACCESS_TYPE, JWT_THIRDPARTY_ACCESS_TYPE]: raise jwt.InvalidTokenError( \"Invalid token. Create new", "payload.update(additional_payload) return payload def jwt_encode(payload: Dict[str, Any]) -> str: return", "Optional[timedelta], token_owner: str ) -> Dict[str, Any]: utc_now = datetime.utcnow()", "get_token_from_request(request: WSGIRequest) -> Optional[str]: auth_token = request.META.get(SALEOR_AUTH_HEADER) if not auth_token:", "\"access\" JWT_REFRESH_TYPE = \"refresh\" JWT_THIRDPARTY_ACCESS_TYPE = \"thirdparty\" JWT_REFRESH_TOKEN_COOKIE_NAME = \"refreshToken\"", "from ..app.models import App from .permissions import ( get_permission_names, get_permissions_from_codenames,", "token_owner: str = JWT_SALEOR_OWNER_NAME, ) -> Dict[str, Any]: payload =", "in [JWT_ACCESS_TYPE, JWT_THIRDPARTY_ACCESS_TYPE]: raise jwt.InvalidTokenError( \"Invalid token. 
Create new one", "[\"JWT\", \"BEARER\"] JWT_ACCESS_TYPE = \"access\" JWT_REFRESH_TYPE = \"refresh\" JWT_THIRDPARTY_ACCESS_TYPE =", "= \"access\" JWT_REFRESH_TYPE = \"refresh\" JWT_THIRDPARTY_ACCESS_TYPE = \"thirdparty\" JWT_REFRESH_TOKEN_COOKIE_NAME =", "JWT_ALGORITHM, ) def jwt_decode_with_exception_handler( token: str, verify_expiration=settings.JWT_EXPIRE ) -> Optional[Dict[str,", "str: payload.update(jwt_base_payload(exp_delta, token_owner=JWT_SALEOR_OWNER_NAME)) return jwt_encode(payload) def create_access_token( user: User, additional_payload:", "verify_expiration=settings.JWT_EXPIRE) -> Dict[str, Any]: return jwt.decode( token, settings.SECRET_KEY, # type:", "timedelta) -> str: payload.update(jwt_base_payload(exp_delta, token_owner=JWT_SALEOR_OWNER_NAME)) return jwt_encode(payload) def create_access_token( user:", "permissions is not None: token_permissions = get_permissions_from_names(permissions) token_codenames = [perm.codename", "= graphene.Node.to_global_id(\"App\", app.id) additional_payload = { \"app\": app_id, PERMISSIONS_FIELD: list(app_permission_enums", "return False owner = payload.get(JWT_OWNER_FIELD) if not owner or owner", "the permissions assigned to it. 
"""Helpers for building, signing, decoding and validating Saleor-issued JWTs."""

from datetime import datetime, timedelta
from typing import Any, Dict, Optional

import graphene
import jwt
from django.conf import settings
from django.core.handlers.wsgi import WSGIRequest

from ..account.models import User
from ..app.models import App
from .permissions import (
    get_permission_names,
    get_permissions_from_codenames,
    get_permissions_from_names,
)

JWT_ALGORITHM = "HS256"
SALEOR_AUTH_HEADER = "HTTP_AUTHORIZATION_BEARER"
DEFAULT_AUTH_HEADER = "HTTP_AUTHORIZATION"
AUTH_HEADER_PREFIXES = ["JWT", "BEARER"]

JWT_ACCESS_TYPE = "access"
JWT_REFRESH_TYPE = "refresh"
JWT_THIRDPARTY_ACCESS_TYPE = "thirdparty"
JWT_REFRESH_TOKEN_COOKIE_NAME = "refreshToken"

PERMISSIONS_FIELD = "permissions"
JWT_SALEOR_OWNER_NAME = "saleor"
JWT_OWNER_FIELD = "owner"


def jwt_base_payload(
    exp_delta: Optional[timedelta], token_owner: str
) -> Dict[str, Any]:
    """Return claims common to every token: ``iat``, the owner and optional ``exp``.

    :param exp_delta: time-to-live; when falsy, no ``exp`` claim is set.
    :param token_owner: issuer identifier stored under ``JWT_OWNER_FIELD``.
    """
    # NOTE(review): timestamps are naive UTC. ``datetime.utcnow`` is deprecated
    # since Python 3.12 — consider ``datetime.now(timezone.utc)`` after auditing
    # callers that may compare naive datetimes.
    utc_now = datetime.utcnow()
    payload = {"iat": utc_now, JWT_OWNER_FIELD: token_owner}
    if exp_delta:
        payload["exp"] = utc_now + exp_delta
    return payload


def jwt_user_payload(
    user: User,
    token_type: str,
    exp_delta: Optional[timedelta],
    additional_payload: Optional[Dict[str, Any]] = None,
    token_owner: str = JWT_SALEOR_OWNER_NAME,
) -> Dict[str, Any]:
    """Build the claims describing *user* on top of the base claims.

    The ``token`` claim carries the user's ``jwt_token_key`` so previously
    issued tokens can be invalidated in bulk by rotating that key (enforced
    in ``get_user_from_payload``). Entries in *additional_payload* override
    the defaults.
    """
    payload = jwt_base_payload(exp_delta, token_owner)
    payload.update(
        {
            "token": user.jwt_token_key,
            "email": user.email,
            "type": token_type,
            "user_id": graphene.Node.to_global_id("User", user.id),
            "is_staff": user.is_staff,
            "is_supplier": user.is_supplier,
        }
    )
    if additional_payload:
        payload.update(additional_payload)
    return payload


def jwt_encode(payload: Dict[str, Any]) -> str:
    """Serialize and sign *payload* with the project's secret key."""
    return jwt.encode(
        payload,
        settings.SECRET_KEY,  # type: ignore
        JWT_ALGORITHM,
    )


def jwt_decode_with_exception_handler(
    token: str, verify_expiration=settings.JWT_EXPIRE
) -> Optional[Dict[str, Any]]:
    """Decode *token*, returning ``None`` instead of raising on any JWT error."""
    try:
        return jwt_decode(token, verify_expiration=verify_expiration)
    except jwt.PyJWTError:
        return None


def jwt_decode(token: str, verify_expiration=settings.JWT_EXPIRE) -> Dict[str, Any]:
    """Decode and signature-verify *token*; raises ``jwt.PyJWTError`` on failure."""
    return jwt.decode(
        token,
        settings.SECRET_KEY,  # type: ignore
        algorithms=[JWT_ALGORITHM],
        options={"verify_exp": verify_expiration},
    )


def create_token(payload: Dict[str, Any], exp_delta: timedelta) -> str:
    """Sign *payload* as a Saleor-owned token expiring after *exp_delta*.

    The passed-in *payload* dict is mutated: base claims are merged into it.
    """
    payload.update(jwt_base_payload(exp_delta, token_owner=JWT_SALEOR_OWNER_NAME))
    return jwt_encode(payload)


def create_access_token(
    user: User, additional_payload: Optional[Dict[str, Any]] = None
) -> str:
    """Return a signed access token for *user* (TTL ``settings.JWT_TTL_ACCESS``)."""
    payload = jwt_user_payload(
        user, JWT_ACCESS_TYPE, settings.JWT_TTL_ACCESS, additional_payload
    )
    return jwt_encode(payload)


def create_refresh_token(
    user: User, additional_payload: Optional[Dict[str, Any]] = None
) -> str:
    """Return a signed refresh token for *user* (TTL ``settings.JWT_TTL_REFRESH``)."""
    payload = jwt_user_payload(
        user,
        JWT_REFRESH_TYPE,
        settings.JWT_TTL_REFRESH,
        additional_payload,
    )
    return jwt_encode(payload)


def get_token_from_request(request: WSGIRequest) -> Optional[str]:
    """Extract the bearer token from *request* headers, or return ``None``.

    The dedicated Saleor header takes precedence; otherwise a standard
    ``Authorization: <JWT|Bearer> <token>`` header is accepted.
    """
    auth_token = request.META.get(SALEOR_AUTH_HEADER)
    if not auth_token:
        auth = request.META.get(DEFAULT_AUTH_HEADER, "").split(maxsplit=1)
        if len(auth) == 2 and auth[0].upper() in AUTH_HEADER_PREFIXES:
            auth_token = auth[1]
    return auth_token


def get_user_from_payload(payload: Dict[str, Any]) -> Optional[User]:
    """Resolve the active user referenced by a decoded token payload.

    :raises jwt.InvalidTokenError: when the payload has no ``token`` claim, no
        matching active user exists, or the user's ``jwt_token_key`` has been
        rotated since the token was issued.
    """
    user = User.objects.filter(email=payload["email"], is_active=True).first()
    user_jwt_token = payload.get("token")
    # Single guard replaces two duplicated raise-blocks with the same message.
    if not user_jwt_token or not user or user.jwt_token_key != user_jwt_token:
        raise jwt.InvalidTokenError(
            "Invalid token. Create new one by using tokenCreate mutation."
        )
    return user


def is_saleor_token(token: str) -> bool:
    """Confirm that token was generated by Saleor not by plugin."""
    try:
        # Signature is deliberately not verified here: only the
        # unauthenticated "owner" claim is inspected.
        payload = jwt.decode(token, options={"verify_signature": False})
    except jwt.PyJWTError:
        return False
    # Equality already fails for a missing/falsy owner, so no extra guard needed.
    return payload.get(JWT_OWNER_FIELD) == JWT_SALEOR_OWNER_NAME


def get_user_from_access_token(token: str) -> Optional[User]:
    """Return the user for a Saleor access *token*, or ``None`` for foreign tokens."""
    if not is_saleor_token(token):
        return None
    payload = jwt_decode(token)
    return get_user_from_access_payload(payload)


def get_user_from_access_payload(payload: dict) -> Optional[User]:
    """Return the user for an access-token payload, narrowing their permissions.

    Third-party (app) tokens carry an explicit permission list; when present,
    the user's effective permissions are restricted to exactly that list.

    :raises jwt.InvalidTokenError: for non-access token types, or via
        ``get_user_from_payload`` when the payload is stale or unmatched.
    """
    jwt_type = payload.get("type")
    if jwt_type not in (JWT_ACCESS_TYPE, JWT_THIRDPARTY_ACCESS_TYPE):
        raise jwt.InvalidTokenError(
            "Invalid token. Create new one by using tokenCreate mutation."
        )
    permissions = payload.get(PERMISSIONS_FIELD, None)
    user = get_user_from_payload(payload)
    if user and permissions is not None:
        token_permissions = get_permissions_from_names(permissions)
        token_codenames = [perm.codename for perm in token_permissions]
        user.effective_permissions = get_permissions_from_codenames(token_codenames)
        # Staff status mirrors whether any permissions survived the narrowing.
        user.is_staff = bool(user.effective_permissions)
    return user


def create_access_token_for_app(app: "App", user: "User"):
    """Create access token for app.

    App can use user jwt token to proceed given operation on the Saleor side.
    The token which can be used by App has additional field defining the
    permissions assigned to it. The permissions set is the intersection of
    user permissions and app permissions.
    """
    app_permissions = app.permissions.all()
    app_permission_enums = get_permission_names(app_permissions)

    permissions = user.effective_permissions
    user_permission_enums = get_permission_names(permissions)
    app_id = graphene.Node.to_global_id("App", app.id)
    additional_payload = {
        "app": app_id,
        PERMISSIONS_FIELD: list(app_permission_enums & user_permission_enums),
    }
    payload = jwt_user_payload(
        user,
        JWT_THIRDPARTY_ACCESS_TYPE,
        exp_delta=settings.JWT_TTL_APP_ACCESS,
        additional_payload=additional_payload,
    )
    return jwt_encode(payload)
[ "False, json.dumps(data, indent=4) else: for json_target_value in json_target[0]: json_added.append(json_target_value) json_final", "ClientConfiguration: \"\"\" This class is a handler for data configuration", "configuration as JSON. It reads configuration file once. \"\"\" if", "= json.load({}) return self.config_data def update_json_config(self, json_added, json_path, options, list_column,", "= jsonpath_rw_ext.match(json_path, data) if isinstance(json_target[0], dict): if len(list_column)==1: json_target[0][list_column[0]] =", "flask import make_response logger = logging.getLogger(__name__) CONFIG_PATH = '/tests/settings/config.json' class", "\"\"\" This class is a handler for data configuration with", "\"\"\" data = literal_eval(config_text) if(options != \"replace\"): json_target = jsonpath_rw_ext.match(json_path,", "None: if path is None: path = CONFIG_PATH else :", "ClientConfiguration.get_path(match), json_final) return make_response(json.dumps({'success':True, 'data':json.dumps(data, indent=4)})) @classmethod def get_path(self, match):", "import literal_eval from flask import make_response logger = logging.getLogger(__name__) CONFIG_PATH", "return self.config_data def update_json_config(self, json_added, json_path, options, list_column, config_text): \"\"\"", "if path.startswith('./') : path = path[1:] elif not path.startswith('/'): path", "else: return False, json.dumps(data, indent=4) else: for json_target_value in json_target[0]:", "path = path[1:] elif not path.startswith('/'): path = '/%s' %", "# check if item is an array if (first.startswith('[') and", "check if item is an array if (first.startswith('[') and first.endswith(']'))", "parse(json_path) matches = jsonpath_expr.find(data) if len(matches)==0: return make_response(json.dumps({'success':False, 'message':'JSON path", "Each item is a path component, start from outer most", "elif not path.startswith('/'): path = '/%s' % (path) try: with", "json_target[0][list_column[0]] = json_added json_final 
= json_target[0] else: return False, json.dumps(data,", "path_element in ClientConfiguration.get_path(match.context): yield path_element yield str(match.path) @classmethod def update_json(self,", "jsonpath_rw import jsonpath, parse from . import events from ast", "self.config_data is None: if path is None: path = CONFIG_PATH", "pass json[first] = ClientConfiguration.update_json(json[first], path, value) return json except StopIteration:", "configuration with JSON data structure. \"\"\" def __init__(self): self.config_data =", "parse from . import events from ast import literal_eval from", "for path_element in ClientConfiguration.get_path(match.context): yield path_element yield str(match.path) @classmethod def", ". import events from ast import literal_eval from flask import", "a path component, start from outer most item. \"\"\" if", "% (path) try: with open((os.environ['PYTHONPATH'].split(os.pathsep))[-1] + path, \"r\") as data_file:", "update_json_config(self, json_added, json_path, options, list_column, config_text): \"\"\" Write JSON file", "if self.config_data is None: if path is None: path =", "if isinstance(json_target[0], dict): if len(list_column)==1: json_target[0][list_column[0]] = json_added json_final =", "self.config_data = json.load(data_file) except Exception as err: logger.info(err) self.config_data =", "path not found.'})) for match in matches: data = ClientConfiguration.update_json(data,", "path, \"r\") as data_file: self.config_data = json.load(data_file) except Exception as", "import events from ast import literal_eval from flask import make_response", "get_path(self, match): \"\"\" Return an iterator based upon MATCH.PATH. Each", "and first.endswith(']')) or (first.startswith('{') and first.endswith('}')): try: first = int(first[1:-1])", "dictionary PATH with VALUE. 
Return updated JSON \"\"\" try: first", "\"replace\"): json_target = jsonpath_rw_ext.match(json_path, data) if isinstance(json_target[0], dict): if len(list_column)==1:", "next(path) # check if item is an array if (first.startswith('[')", "json[first] = ClientConfiguration.update_json(json[first], path, value) return json except StopIteration: return", "config_text): \"\"\" Write JSON file configuration \"\"\" data = literal_eval(config_text)", "ast import literal_eval from flask import make_response logger = logging.getLogger(__name__)", "first = next(path) # check if item is an array", "(path) try: with open((os.environ['PYTHONPATH'].split(os.pathsep))[-1] + path, \"r\") as data_file: self.config_data", "__init__(self): self.config_data = None def read_json(self, path=None): \"\"\" Will get", "json, logging, jsonpath_rw_ext, jsonpath_rw from jsonpath_rw import jsonpath, parse from", "json_target[0] else: return False, json.dumps(data, indent=4) else: for json_target_value in", "upon MATCH.PATH. Each item is a path component, start from", "int(first[1:-1]) except ValueError: pass json[first] = ClientConfiguration.update_json(json[first], path, value) return", "indent=4)})) @classmethod def get_path(self, match): \"\"\" Return an iterator based", "make_response(json.dumps({'success':True, 'data':json.dumps(data, indent=4)})) @classmethod def get_path(self, match): \"\"\" Return an", "MATCH.PATH. Each item is a path component, start from outer", "options, list_column, config_text): \"\"\" Write JSON file configuration \"\"\" data", "json.dumps(data, indent=4) else: for json_target_value in json_target[0]: json_added.append(json_target_value) json_final =", "<filename>locust/configuration.py import os, json, logging, jsonpath_rw_ext, jsonpath_rw from jsonpath_rw import", "os, json, logging, jsonpath_rw_ext, jsonpath_rw from jsonpath_rw import jsonpath, parse", "self.config_data = None def read_json(self, path=None): \"\"\" Will get the", "item. 
\"\"\" if match.context is not None: for path_element in", "is an array if (first.startswith('[') and first.endswith(']')) or (first.startswith('{') and", "is None: if path is None: path = CONFIG_PATH else", "and first.endswith('}')): try: first = int(first[1:-1]) except ValueError: pass json[first]", "err: logger.info(err) self.config_data = json.load({}) return self.config_data def update_json_config(self, json_added,", "data_file: self.config_data = json.load(data_file) except Exception as err: logger.info(err) self.config_data", "return False, json.dumps(data, indent=4) else: for json_target_value in json_target[0]: json_added.append(json_target_value)", "matches = jsonpath_expr.find(data) if len(matches)==0: return make_response(json.dumps({'success':False, 'message':'JSON path not", "is a path component, start from outer most item. \"\"\"", "handler for data configuration with JSON data structure. \"\"\" def", "len(list_column)==1: json_target[0][list_column[0]] = json_added json_final = json_target[0] else: return False,", "CONFIG_PATH = '/tests/settings/config.json' class ClientConfiguration: \"\"\" This class is a", "json_final = json_added jsonpath_expr = parse(json_path) matches = jsonpath_expr.find(data) if", "get the data of configuration as JSON. It reads configuration", "else: for json_target_value in json_target[0]: json_added.append(json_target_value) json_final = json_added else:", "from . import events from ast import literal_eval from flask", "ClientConfiguration.get_path(match.context): yield path_element yield str(match.path) @classmethod def update_json(self, json, path,", "path, value): \"\"\" Update JSON dictionary PATH with VALUE. 
Return", "not None: for path_element in ClientConfiguration.get_path(match.context): yield path_element yield str(match.path)", "configuration \"\"\" data = literal_eval(config_text) if(options != \"replace\"): json_target =", "\"\"\" if match.context is not None: for path_element in ClientConfiguration.get_path(match.context):", "= path[1:] elif not path.startswith('/'): path = '/%s' % (path)", "= json_added json_final = json_target[0] else: return False, json.dumps(data, indent=4)", "matches: data = ClientConfiguration.update_json(data, ClientConfiguration.get_path(match), json_final) return make_response(json.dumps({'success':True, 'data':json.dumps(data, indent=4)}))", "outer most item. \"\"\" if match.context is not None: for", "file once. \"\"\" if self.config_data is None: if path is", "json_target = jsonpath_rw_ext.match(json_path, data) if isinstance(json_target[0], dict): if len(list_column)==1: json_target[0][list_column[0]]", "JSON dictionary PATH with VALUE. Return updated JSON \"\"\" try:", "import jsonpath, parse from . import events from ast import", "item is an array if (first.startswith('[') and first.endswith(']')) or (first.startswith('{')", "jsonpath_rw from jsonpath_rw import jsonpath, parse from . import events", "= int(first[1:-1]) except ValueError: pass json[first] = ClientConfiguration.update_json(json[first], path, value)", "json_target_value in json_target[0]: json_added.append(json_target_value) json_final = json_added else: json_final =", "import make_response logger = logging.getLogger(__name__) CONFIG_PATH = '/tests/settings/config.json' class ClientConfiguration:", "Return an iterator based upon MATCH.PATH. Each item is a", "jsonpath_rw_ext.match(json_path, data) if isinstance(json_target[0], dict): if len(list_column)==1: json_target[0][list_column[0]] = json_added", "\"\"\" Update JSON dictionary PATH with VALUE. 
Return updated JSON", ": if path.startswith('./') : path = path[1:] elif not path.startswith('/'):", "This class is a handler for data configuration with JSON", "update_json(self, json, path, value): \"\"\" Update JSON dictionary PATH with", "match in matches: data = ClientConfiguration.update_json(data, ClientConfiguration.get_path(match), json_final) return make_response(json.dumps({'success':True,", ": path = path[1:] elif not path.startswith('/'): path = '/%s'", "if(options != \"replace\"): json_target = jsonpath_rw_ext.match(json_path, data) if isinstance(json_target[0], dict):", "path[1:] elif not path.startswith('/'): path = '/%s' % (path) try:", "in ClientConfiguration.get_path(match.context): yield path_element yield str(match.path) @classmethod def update_json(self, json,", "json_added jsonpath_expr = parse(json_path) matches = jsonpath_expr.find(data) if len(matches)==0: return", "'message':'JSON path not found.'})) for match in matches: data =", "= literal_eval(config_text) if(options != \"replace\"): json_target = jsonpath_rw_ext.match(json_path, data) if", "as JSON. It reads configuration file once. 
\"\"\" if self.config_data", "make_response logger = logging.getLogger(__name__) CONFIG_PATH = '/tests/settings/config.json' class ClientConfiguration: \"\"\"", "json.load({}) return self.config_data def update_json_config(self, json_added, json_path, options, list_column, config_text):", "+ path, \"r\") as data_file: self.config_data = json.load(data_file) except Exception", "Write JSON file configuration \"\"\" data = literal_eval(config_text) if(options !=", "an array if (first.startswith('[') and first.endswith(']')) or (first.startswith('{') and first.endswith('}')):", "ClientConfiguration.update_json(data, ClientConfiguration.get_path(match), json_final) return make_response(json.dumps({'success':True, 'data':json.dumps(data, indent=4)})) @classmethod def get_path(self,", "from flask import make_response logger = logging.getLogger(__name__) CONFIG_PATH = '/tests/settings/config.json'", "self.config_data def update_json_config(self, json_added, json_path, options, list_column, config_text): \"\"\" Write", "'data':json.dumps(data, indent=4)})) @classmethod def get_path(self, match): \"\"\" Return an iterator", "based upon MATCH.PATH. Each item is a path component, start", "path_element yield str(match.path) @classmethod def update_json(self, json, path, value): \"\"\"", "path=None): \"\"\" Will get the data of configuration as JSON.", "try: with open((os.environ['PYTHONPATH'].split(os.pathsep))[-1] + path, \"r\") as data_file: self.config_data =", "Exception as err: logger.info(err) self.config_data = json.load({}) return self.config_data def", "\"\"\" Write JSON file configuration \"\"\" data = literal_eval(config_text) if(options", "json_added else: json_final = json_added jsonpath_expr = parse(json_path) matches =", "= logging.getLogger(__name__) CONFIG_PATH = '/tests/settings/config.json' class ClientConfiguration: \"\"\" This class", "of configuration as JSON. It reads configuration file once. 
\"\"\"", "= parse(json_path) matches = jsonpath_expr.find(data) if len(matches)==0: return make_response(json.dumps({'success':False, 'message':'JSON", "first = int(first[1:-1]) except ValueError: pass json[first] = ClientConfiguration.update_json(json[first], path,", "except ValueError: pass json[first] = ClientConfiguration.update_json(json[first], path, value) return json", "It reads configuration file once. \"\"\" if self.config_data is None:", "yield path_element yield str(match.path) @classmethod def update_json(self, json, path, value):", "not path.startswith('/'): path = '/%s' % (path) try: with open((os.environ['PYTHONPATH'].split(os.pathsep))[-1]", "path is None: path = CONFIG_PATH else : if path.startswith('./')", "in json_target[0]: json_added.append(json_target_value) json_final = json_added else: json_final = json_added", "def update_json(self, json, path, value): \"\"\" Update JSON dictionary PATH", "try: first = int(first[1:-1]) except ValueError: pass json[first] = ClientConfiguration.update_json(json[first],", "once. \"\"\" if self.config_data is None: if path is None:", "json_final) return make_response(json.dumps({'success':True, 'data':json.dumps(data, indent=4)})) @classmethod def get_path(self, match): \"\"\"", "except Exception as err: logger.info(err) self.config_data = json.load({}) return self.config_data", "data) if isinstance(json_target[0], dict): if len(list_column)==1: json_target[0][list_column[0]] = json_added json_final", "= json_added jsonpath_expr = parse(json_path) matches = jsonpath_expr.find(data) if len(matches)==0:", "from ast import literal_eval from flask import make_response logger =", "else: json_final = json_added jsonpath_expr = parse(json_path) matches = jsonpath_expr.find(data)", "match): \"\"\" Return an iterator based upon MATCH.PATH. Each item", "path component, start from outer most item. \"\"\" if match.context", "iterator based upon MATCH.PATH. 
Each item is a path component,", "is not None: for path_element in ClientConfiguration.get_path(match.context): yield path_element yield", "(first.startswith('{') and first.endswith('}')): try: first = int(first[1:-1]) except ValueError: pass", "jsonpath_expr.find(data) if len(matches)==0: return make_response(json.dumps({'success':False, 'message':'JSON path not found.'})) for", "= ClientConfiguration.update_json(data, ClientConfiguration.get_path(match), json_final) return make_response(json.dumps({'success':True, 'data':json.dumps(data, indent=4)})) @classmethod def", "is None: path = CONFIG_PATH else : if path.startswith('./') :", "in matches: data = ClientConfiguration.update_json(data, ClientConfiguration.get_path(match), json_final) return make_response(json.dumps({'success':True, 'data':json.dumps(data,", "\"\"\" try: first = next(path) # check if item is", "JSON file configuration \"\"\" data = literal_eval(config_text) if(options != \"replace\"):", "json_added.append(json_target_value) json_final = json_added else: json_final = json_added jsonpath_expr =", "= json_target[0] else: return False, json.dumps(data, indent=4) else: for json_target_value", "None: for path_element in ClientConfiguration.get_path(match.context): yield path_element yield str(match.path) @classmethod", "yield str(match.path) @classmethod def update_json(self, json, path, value): \"\"\" Update", "for json_target_value in json_target[0]: json_added.append(json_target_value) json_final = json_added else: json_final", "configuration file once. 
\"\"\" if self.config_data is None: if path", "found.'})) for match in matches: data = ClientConfiguration.update_json(data, ClientConfiguration.get_path(match), json_final)", "Return updated JSON \"\"\" try: first = next(path) # check", "= next(path) # check if item is an array if", "if len(list_column)==1: json_target[0][list_column[0]] = json_added json_final = json_target[0] else: return", "logging.getLogger(__name__) CONFIG_PATH = '/tests/settings/config.json' class ClientConfiguration: \"\"\" This class is", "structure. \"\"\" def __init__(self): self.config_data = None def read_json(self, path=None):", "= json_added else: json_final = json_added jsonpath_expr = parse(json_path) matches", "start from outer most item. \"\"\" if match.context is not", "updated JSON \"\"\" try: first = next(path) # check if", "= ClientConfiguration.update_json(json[first], path, value) return json except StopIteration: return value", "array if (first.startswith('[') and first.endswith(']')) or (first.startswith('{') and first.endswith('}')): try:", "@classmethod def get_path(self, match): \"\"\" Return an iterator based upon", "component, start from outer most item. \"\"\" if match.context is", "try: first = next(path) # check if item is an", "if item is an array if (first.startswith('[') and first.endswith(']')) or", "as data_file: self.config_data = json.load(data_file) except Exception as err: logger.info(err)", "Will get the data of configuration as JSON. 
It reads", "json.load(data_file) except Exception as err: logger.info(err) self.config_data = json.load({}) return", "import os, json, logging, jsonpath_rw_ext, jsonpath_rw from jsonpath_rw import jsonpath,", "str(match.path) @classmethod def update_json(self, json, path, value): \"\"\" Update JSON", "list_column, config_text): \"\"\" Write JSON file configuration \"\"\" data =", "events from ast import literal_eval from flask import make_response logger", "json_final = json_target[0] else: return False, json.dumps(data, indent=4) else: for", "!= \"replace\"): json_target = jsonpath_rw_ext.match(json_path, data) if isinstance(json_target[0], dict): if", "not found.'})) for match in matches: data = ClientConfiguration.update_json(data, ClientConfiguration.get_path(match),", "logger.info(err) self.config_data = json.load({}) return self.config_data def update_json_config(self, json_added, json_path,", "JSON. It reads configuration file once. \"\"\" if self.config_data is", "is a handler for data configuration with JSON data structure.", "def read_json(self, path=None): \"\"\" Will get the data of configuration", "item is a path component, start from outer most item.", "= json.load(data_file) except Exception as err: logger.info(err) self.config_data = json.load({})", "path = '/%s' % (path) try: with open((os.environ['PYTHONPATH'].split(os.pathsep))[-1] + path,", "JSON data structure. \"\"\" def __init__(self): self.config_data = None def", "path = CONFIG_PATH else : if path.startswith('./') : path =", "\"r\") as data_file: self.config_data = json.load(data_file) except Exception as err:", "json_added json_final = json_target[0] else: return False, json.dumps(data, indent=4) else:", "= jsonpath_expr.find(data) if len(matches)==0: return make_response(json.dumps({'success':False, 'message':'JSON path not found.'}))", "for data configuration with JSON data structure. 
\"\"\" def __init__(self):", "first.endswith(']')) or (first.startswith('{') and first.endswith('}')): try: first = int(first[1:-1]) except", "path.startswith('/'): path = '/%s' % (path) try: with open((os.environ['PYTHONPATH'].split(os.pathsep))[-1] +", "= '/%s' % (path) try: with open((os.environ['PYTHONPATH'].split(os.pathsep))[-1] + path, \"r\")", "data = ClientConfiguration.update_json(data, ClientConfiguration.get_path(match), json_final) return make_response(json.dumps({'success':True, 'data':json.dumps(data, indent=4)})) @classmethod", "= '/tests/settings/config.json' class ClientConfiguration: \"\"\" This class is a handler", "data structure. \"\"\" def __init__(self): self.config_data = None def read_json(self,", "the data of configuration as JSON. It reads configuration file", "json_path, options, list_column, config_text): \"\"\" Write JSON file configuration \"\"\"", "logging, jsonpath_rw_ext, jsonpath_rw from jsonpath_rw import jsonpath, parse from .", "CONFIG_PATH else : if path.startswith('./') : path = path[1:] elif", "return make_response(json.dumps({'success':False, 'message':'JSON path not found.'})) for match in matches:", "jsonpath_expr = parse(json_path) matches = jsonpath_expr.find(data) if len(matches)==0: return make_response(json.dumps({'success':False,", "data = literal_eval(config_text) if(options != \"replace\"): json_target = jsonpath_rw_ext.match(json_path, data)", "an iterator based upon MATCH.PATH. Each item is a path", "with VALUE. Return updated JSON \"\"\" try: first = next(path)", "Update JSON dictionary PATH with VALUE. 
Return updated JSON \"\"\"", "logger = logging.getLogger(__name__) CONFIG_PATH = '/tests/settings/config.json' class ClientConfiguration: \"\"\" This", "path.startswith('./') : path = path[1:] elif not path.startswith('/'): path =", "for match in matches: data = ClientConfiguration.update_json(data, ClientConfiguration.get_path(match), json_final) return", "def __init__(self): self.config_data = None def read_json(self, path=None): \"\"\" Will", "len(matches)==0: return make_response(json.dumps({'success':False, 'message':'JSON path not found.'})) for match in", "data configuration with JSON data structure. \"\"\" def __init__(self): self.config_data", "data of configuration as JSON. It reads configuration file once.", "as err: logger.info(err) self.config_data = json.load({}) return self.config_data def update_json_config(self,", "self.config_data = json.load({}) return self.config_data def update_json_config(self, json_added, json_path, options,", "'/%s' % (path) try: with open((os.environ['PYTHONPATH'].split(os.pathsep))[-1] + path, \"r\") as", "with JSON data structure. \"\"\" def __init__(self): self.config_data = None", "jsonpath_rw_ext, jsonpath_rw from jsonpath_rw import jsonpath, parse from . import", "None def read_json(self, path=None): \"\"\" Will get the data of", "json_final = json_added else: json_final = json_added jsonpath_expr = parse(json_path)", "\"\"\" if self.config_data is None: if path is None: path", "JSON \"\"\" try: first = next(path) # check if item", "if (first.startswith('[') and first.endswith(']')) or (first.startswith('{') and first.endswith('}')): try: first", "reads configuration file once. \"\"\" if self.config_data is None: if", "open((os.environ['PYTHONPATH'].split(os.pathsep))[-1] + path, \"r\") as data_file: self.config_data = json.load(data_file) except", "from jsonpath_rw import jsonpath, parse from . import events from", "\"\"\" Will get the data of configuration as JSON. 
It", "value): \"\"\" Update JSON dictionary PATH with VALUE. Return updated", "\"\"\" Return an iterator based upon MATCH.PATH. Each item is", "a handler for data configuration with JSON data structure. \"\"\"", "isinstance(json_target[0], dict): if len(list_column)==1: json_target[0][list_column[0]] = json_added json_final = json_target[0]", "json_added, json_path, options, list_column, config_text): \"\"\" Write JSON file configuration", "json_target[0]: json_added.append(json_target_value) json_final = json_added else: json_final = json_added jsonpath_expr", "indent=4) else: for json_target_value in json_target[0]: json_added.append(json_target_value) json_final = json_added", "json, path, value): \"\"\" Update JSON dictionary PATH with VALUE.", "if path is None: path = CONFIG_PATH else : if", "def update_json_config(self, json_added, json_path, options, list_column, config_text): \"\"\" Write JSON", "make_response(json.dumps({'success':False, 'message':'JSON path not found.'})) for match in matches: data", "ValueError: pass json[first] = ClientConfiguration.update_json(json[first], path, value) return json except", "literal_eval from flask import make_response logger = logging.getLogger(__name__) CONFIG_PATH =", "match.context is not None: for path_element in ClientConfiguration.get_path(match.context): yield path_element", "dict): if len(list_column)==1: json_target[0][list_column[0]] = json_added json_final = json_target[0] else:", "= CONFIG_PATH else : if path.startswith('./') : path = path[1:]", "= None def read_json(self, path=None): \"\"\" Will get the data", "PATH with VALUE. Return updated JSON \"\"\" try: first =", "read_json(self, path=None): \"\"\" Will get the data of configuration as", "def get_path(self, match): \"\"\" Return an iterator based upon MATCH.PATH.", "or (first.startswith('{') and first.endswith('}')): try: first = int(first[1:-1]) except ValueError:", "VALUE. 
Return updated JSON \"\"\" try: first = next(path) #", "class ClientConfiguration: \"\"\" This class is a handler for data", "'/tests/settings/config.json' class ClientConfiguration: \"\"\" This class is a handler for", "literal_eval(config_text) if(options != \"replace\"): json_target = jsonpath_rw_ext.match(json_path, data) if isinstance(json_target[0],", "jsonpath, parse from . import events from ast import literal_eval", "return make_response(json.dumps({'success':True, 'data':json.dumps(data, indent=4)})) @classmethod def get_path(self, match): \"\"\" Return", "if match.context is not None: for path_element in ClientConfiguration.get_path(match.context): yield", "first.endswith('}')): try: first = int(first[1:-1]) except ValueError: pass json[first] =", "else : if path.startswith('./') : path = path[1:] elif not", "from outer most item. \"\"\" if match.context is not None:", "class is a handler for data configuration with JSON data", "if len(matches)==0: return make_response(json.dumps({'success':False, 'message':'JSON path not found.'})) for match", "\"\"\" def __init__(self): self.config_data = None def read_json(self, path=None): \"\"\"", "@classmethod def update_json(self, json, path, value): \"\"\" Update JSON dictionary", "None: path = CONFIG_PATH else : if path.startswith('./') : path", "file configuration \"\"\" data = literal_eval(config_text) if(options != \"replace\"): json_target", "most item. \"\"\" if match.context is not None: for path_element", "(first.startswith('[') and first.endswith(']')) or (first.startswith('{') and first.endswith('}')): try: first =", "with open((os.environ['PYTHONPATH'].split(os.pathsep))[-1] + path, \"r\") as data_file: self.config_data = json.load(data_file)" ]
[ "[ ('data', '0022_discardaction'), ] operations = [ migrations.AddField( model_name='discardaction', name='answers',", "on 2019-11-14 16:48 import django.contrib.postgres.fields.jsonb from django.db import migrations class", "django.contrib.postgres.fields.jsonb from django.db import migrations class Migration(migrations.Migration): dependencies = [", "by Django 2.2.4 on 2019-11-14 16:48 import django.contrib.postgres.fields.jsonb from django.db", "2019-11-14 16:48 import django.contrib.postgres.fields.jsonb from django.db import migrations class Migration(migrations.Migration):", "django.db import migrations class Migration(migrations.Migration): dependencies = [ ('data', '0022_discardaction'),", "('data', '0022_discardaction'), ] operations = [ migrations.AddField( model_name='discardaction', name='answers', field=django.contrib.postgres.fields.jsonb.JSONField(blank=True,", "operations = [ migrations.AddField( model_name='discardaction', name='answers', field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True), ), ]", "<reponame>SIXMON/peps # Generated by Django 2.2.4 on 2019-11-14 16:48 import", "Migration(migrations.Migration): dependencies = [ ('data', '0022_discardaction'), ] operations = [", "import migrations class Migration(migrations.Migration): dependencies = [ ('data', '0022_discardaction'), ]", "16:48 import django.contrib.postgres.fields.jsonb from django.db import migrations class Migration(migrations.Migration): dependencies", "Generated by Django 2.2.4 on 2019-11-14 16:48 import django.contrib.postgres.fields.jsonb from", "'0022_discardaction'), ] operations = [ migrations.AddField( model_name='discardaction', name='answers', field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True),", "Django 2.2.4 on 2019-11-14 16:48 import django.contrib.postgres.fields.jsonb from django.db import", "from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('data',", "# Generated by Django 
2.2.4 on 2019-11-14 16:48 import django.contrib.postgres.fields.jsonb", "import django.contrib.postgres.fields.jsonb from django.db import migrations class Migration(migrations.Migration): dependencies =", "dependencies = [ ('data', '0022_discardaction'), ] operations = [ migrations.AddField(", "2.2.4 on 2019-11-14 16:48 import django.contrib.postgres.fields.jsonb from django.db import migrations", "class Migration(migrations.Migration): dependencies = [ ('data', '0022_discardaction'), ] operations =", "migrations class Migration(migrations.Migration): dependencies = [ ('data', '0022_discardaction'), ] operations", "] operations = [ migrations.AddField( model_name='discardaction', name='answers', field=django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True), ),", "= [ ('data', '0022_discardaction'), ] operations = [ migrations.AddField( model_name='discardaction'," ]
[ "any class that wants to be serialized to JSON\"\"\" def", "in your classes.') class CommonMessage(Jsonable): def __init__(self): self.client = Client()", "type=self.type, body=self.body, tags=self.tags) class Client(Jsonable): def __init__(self): self.id = \"\"", "\"\" self.name = \"\" self.time = int(round(time() * 1000)) def", "self.type = \"\" self.body = \"\" self.tags = [\"music\", \"culture\",", "to JSON\"\"\" def toJson(self): \"\"\"Abstract method\"\"\" raise NotImplementedError('You should implement", "def __init__(self): self.id = \"\" self.name = \"\" self.time =", "to standardize the toJson method to be implemented by any", "def toJson(self): return dict(client=self.client, emitter=self.emitter, type=self.type, body=self.body, tags=self.tags) class Client(Jsonable):", "class that wants to be serialized to JSON\"\"\" def toJson(self):", "should implement this method in your classes.') class CommonMessage(Jsonable): def", "return dict(client=self.client, emitter=self.emitter, type=self.type, body=self.body, tags=self.tags) class Client(Jsonable): def __init__(self):", "name=self.name, time=self.time) class Emitter(Jsonable): def __init__(self): self.id = \"\" def", "'complex (nested)' Python objects.\"\"\" def default(self, o): if hasattr(o, 'toJson'):", "__init__(self): self.client = Client() self.emitter = Emitter() self.type = \"\"", "class CommonMessage(Jsonable): def __init__(self): self.client = Client() self.emitter = Emitter()", "method to be implemented by any class that wants to", "class to standardize the toJson method to be implemented by", "= [\"music\", \"culture\", \"food\"] def toJson(self): return dict(client=self.client, emitter=self.emitter, type=self.type,", "\"\" self.time = int(round(time() * 1000)) def toJson(self): return dict(id=self.id,", "encoder for 'complex (nested)' Python objects.\"\"\" def default(self, o): if", "self.time = int(round(time() * 1000)) def toJson(self): return dict(id=self.id, name=self.name,", "= 
Emitter() self.type = \"\" self.body = \"\" self.tags =", "= \"\" self.time = int(round(time() * 1000)) def toJson(self): return", "* 1000)) def toJson(self): return dict(id=self.id, name=self.name, time=self.time) class Emitter(Jsonable):", "[\"music\", \"culture\", \"food\"] def toJson(self): return dict(client=self.client, emitter=self.emitter, type=self.type, body=self.body,", "wants to be serialized to JSON\"\"\" def toJson(self): \"\"\"Abstract method\"\"\"", "= \"\" self.name = \"\" self.time = int(round(time() * 1000))", "to be implemented by any class that wants to be", "Jsonable: \"\"\"Abstract class to standardize the toJson method to be", "toJson(self): \"\"\"Abstract method\"\"\" raise NotImplementedError('You should implement this method in", "Emitter(Jsonable): def __init__(self): self.id = \"\" def toJson(self): return dict(id=self.id)", "this method in your classes.') class CommonMessage(Jsonable): def __init__(self): self.client", "1000)) def toJson(self): return dict(id=self.id, name=self.name, time=self.time) class Emitter(Jsonable): def", "\"\" self.tags = [\"music\", \"culture\", \"food\"] def toJson(self): return dict(client=self.client,", "import time class Jsonable: \"\"\"Abstract class to standardize the toJson", "= \"\" self.body = \"\" self.tags = [\"music\", \"culture\", \"food\"]", "from time import time class Jsonable: \"\"\"Abstract class to standardize", "class Client(Jsonable): def __init__(self): self.id = \"\" self.name = \"\"", "__init__(self): self.id = \"\" self.name = \"\" self.time = int(round(time()", "self.name = \"\" self.time = int(round(time() * 1000)) def toJson(self):", "def toJson(self): return dict(id=self.id) class ComplexJsonEncoder(JSONEncoder): \"\"\"Basic JSON encoder for", "time import time class Jsonable: \"\"\"Abstract class to standardize the", "be serialized to JSON\"\"\" def toJson(self): \"\"\"Abstract method\"\"\" raise NotImplementedError('You", "int(round(time() * 1000)) def toJson(self): return 
dict(id=self.id, name=self.name, time=self.time) class", "__init__(self): self.id = \"\" def toJson(self): return dict(id=self.id) class ComplexJsonEncoder(JSONEncoder):", "that wants to be serialized to JSON\"\"\" def toJson(self): \"\"\"Abstract", "toJson(self): return dict(id=self.id, name=self.name, time=self.time) class Emitter(Jsonable): def __init__(self): self.id", "= Client() self.emitter = Emitter() self.type = \"\" self.body =", "\"\"\"Abstract class to standardize the toJson method to be implemented", "method in your classes.') class CommonMessage(Jsonable): def __init__(self): self.client =", "JSON encoder for 'complex (nested)' Python objects.\"\"\" def default(self, o):", "def default(self, o): if hasattr(o, 'toJson'): return o.toJson() else: return", "class ComplexJsonEncoder(JSONEncoder): \"\"\"Basic JSON encoder for 'complex (nested)' Python objects.\"\"\"", "JSON\"\"\" def toJson(self): \"\"\"Abstract method\"\"\" raise NotImplementedError('You should implement this", "tags=self.tags) class Client(Jsonable): def __init__(self): self.id = \"\" self.name =", "def __init__(self): self.client = Client() self.emitter = Emitter() self.type =", "objects.\"\"\" def default(self, o): if hasattr(o, 'toJson'): return o.toJson() else:", "self.id = \"\" def toJson(self): return dict(id=self.id) class ComplexJsonEncoder(JSONEncoder): \"\"\"Basic", "class Jsonable: \"\"\"Abstract class to standardize the toJson method to", "dict(id=self.id) class ComplexJsonEncoder(JSONEncoder): \"\"\"Basic JSON encoder for 'complex (nested)' Python", "default(self, o): if hasattr(o, 'toJson'): return o.toJson() else: return JSONEncoder.default(self,", "\"\" def toJson(self): return dict(id=self.id) class ComplexJsonEncoder(JSONEncoder): \"\"\"Basic JSON encoder", "your classes.') class CommonMessage(Jsonable): def __init__(self): self.client = Client() self.emitter", "\"\"\"Basic JSON encoder for 'complex (nested)' Python objects.\"\"\" def default(self,", "\"culture\", 
\"food\"] def toJson(self): return dict(client=self.client, emitter=self.emitter, type=self.type, body=self.body, tags=self.tags)", "standardize the toJson method to be implemented by any class", "be implemented by any class that wants to be serialized", "self.client = Client() self.emitter = Emitter() self.type = \"\" self.body", "CommonMessage(Jsonable): def __init__(self): self.client = Client() self.emitter = Emitter() self.type", "time class Jsonable: \"\"\"Abstract class to standardize the toJson method", "the toJson method to be implemented by any class that", "dict(id=self.id, name=self.name, time=self.time) class Emitter(Jsonable): def __init__(self): self.id = \"\"", "return dict(id=self.id) class ComplexJsonEncoder(JSONEncoder): \"\"\"Basic JSON encoder for 'complex (nested)'", "emitter=self.emitter, type=self.type, body=self.body, tags=self.tags) class Client(Jsonable): def __init__(self): self.id =", "import JSONEncoder from time import time class Jsonable: \"\"\"Abstract class", "= \"\" def toJson(self): return dict(id=self.id) class ComplexJsonEncoder(JSONEncoder): \"\"\"Basic JSON", "\"food\"] def toJson(self): return dict(client=self.client, emitter=self.emitter, type=self.type, body=self.body, tags=self.tags) class", "= \"\" self.tags = [\"music\", \"culture\", \"food\"] def toJson(self): return", "json import JSONEncoder from time import time class Jsonable: \"\"\"Abstract", "implemented by any class that wants to be serialized to", "self.emitter = Emitter() self.type = \"\" self.body = \"\" self.tags", "NotImplementedError('You should implement this method in your classes.') class CommonMessage(Jsonable):", "(nested)' Python objects.\"\"\" def default(self, o): if hasattr(o, 'toJson'): return", "implement this method in your classes.') class CommonMessage(Jsonable): def __init__(self):", "by any class that wants to be serialized to JSON\"\"\"", "time=self.time) class Emitter(Jsonable): def __init__(self): self.id = \"\" def toJson(self):", 
"Client() self.emitter = Emitter() self.type = \"\" self.body = \"\"", "Emitter() self.type = \"\" self.body = \"\" self.tags = [\"music\",", "method\"\"\" raise NotImplementedError('You should implement this method in your classes.')", "Client(Jsonable): def __init__(self): self.id = \"\" self.name = \"\" self.time", "toJson(self): return dict(client=self.client, emitter=self.emitter, type=self.type, body=self.body, tags=self.tags) class Client(Jsonable): def", "classes.') class CommonMessage(Jsonable): def __init__(self): self.client = Client() self.emitter =", "ComplexJsonEncoder(JSONEncoder): \"\"\"Basic JSON encoder for 'complex (nested)' Python objects.\"\"\" def", "JSONEncoder from time import time class Jsonable: \"\"\"Abstract class to", "for 'complex (nested)' Python objects.\"\"\" def default(self, o): if hasattr(o,", "return dict(id=self.id, name=self.name, time=self.time) class Emitter(Jsonable): def __init__(self): self.id =", "self.body = \"\" self.tags = [\"music\", \"culture\", \"food\"] def toJson(self):", "to be serialized to JSON\"\"\" def toJson(self): \"\"\"Abstract method\"\"\" raise", "def toJson(self): \"\"\"Abstract method\"\"\" raise NotImplementedError('You should implement this method", "= int(round(time() * 1000)) def toJson(self): return dict(id=self.id, name=self.name, time=self.time)", "dict(client=self.client, emitter=self.emitter, type=self.type, body=self.body, tags=self.tags) class Client(Jsonable): def __init__(self): self.id", "raise NotImplementedError('You should implement this method in your classes.') class", "class Emitter(Jsonable): def __init__(self): self.id = \"\" def toJson(self): return", "toJson(self): return dict(id=self.id) class ComplexJsonEncoder(JSONEncoder): \"\"\"Basic JSON encoder for 'complex", "Python objects.\"\"\" def default(self, o): if hasattr(o, 'toJson'): return o.toJson()", "o): if hasattr(o, 'toJson'): return o.toJson() else: return JSONEncoder.default(self, o)", "\"\"\"Abstract method\"\"\" 
raise NotImplementedError('You should implement this method in your", "self.tags = [\"music\", \"culture\", \"food\"] def toJson(self): return dict(client=self.client, emitter=self.emitter,", "self.id = \"\" self.name = \"\" self.time = int(round(time() *", "def __init__(self): self.id = \"\" def toJson(self): return dict(id=self.id) class", "body=self.body, tags=self.tags) class Client(Jsonable): def __init__(self): self.id = \"\" self.name", "toJson method to be implemented by any class that wants", "from json import JSONEncoder from time import time class Jsonable:", "serialized to JSON\"\"\" def toJson(self): \"\"\"Abstract method\"\"\" raise NotImplementedError('You should", "\"\" self.body = \"\" self.tags = [\"music\", \"culture\", \"food\"] def", "def toJson(self): return dict(id=self.id, name=self.name, time=self.time) class Emitter(Jsonable): def __init__(self):" ]
[ "\"\"\"Compute pi.\"\"\" from decimal import Decimal, getcontext import argparse import", "getcontext import argparse import itertools class ComputePi: \"\"\"Compute pi to", "ComputePi: \"\"\"Compute pi to a specific precision using multiple algorithms.\"\"\"", "= 4*(4*ComputePi.arctan_euler(5, one) - ComputePi.arctan_euler(239, one)) pi //= 10**20 return", "term = term // divisor if term == 0: break", "x * x x_squared_plus_1 = x_squared + 1 term =", "Decimal(16)**k pi += term if term < Decimal(10)**(-precision-10): break pi", "def arctan_euler(x, one=1000000): \"\"\"Calculate arctan(1/x) using euler's accelerated formula. Based", "pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision of pi (default:", "Bailey-Borwein-Plouffe formula.\"\"\" getcontext().prec = precision + 20 pi = Decimal(0)", "Decimal(0) for k in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4)", "import argparse import itertools class ComputePi: \"\"\"Compute pi to a", "Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" x_squared = x * x x_squared_plus_1 =", "x_squared_plus_1 = x_squared + 1 term = (x * one)", "= (two_n+1) * x_squared_plus_1 term *= two_n term += divisor", "* x x_squared_plus_1 = x_squared + 1 term = (x", "@staticmethod def machin_euler(digits): \"\"\"Compute pi using Machin's formula. Based on", "divisor if term == 0: break total += term two_n", "using euler's accelerated formula. 
Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" x_squared = x", "@staticmethod def arctan_euler(x, one=1000000): \"\"\"Calculate arctan(1/x) using euler's accelerated formula.", "to a specific precision using multiple algorithms.\"\"\" @staticmethod def BBP(precision):", "import Decimal, getcontext import argparse import itertools class ComputePi: \"\"\"Compute", "x x_squared_plus_1 = x_squared + 1 term = (x *", "one) - ComputePi.arctan_euler(239, one)) pi //= 10**20 return '3.{}'.format(str(pi)[1:]) if", "if __name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int,", "getcontext().prec = precision + 20 pi = Decimal(0) for k", "precision + 20 pi = Decimal(0) for k in itertools.count():", "Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi @staticmethod def arctan_euler(x,", "pi to a specific precision using multiple algorithms.\"\"\" @staticmethod def", "itertools class ComputePi: \"\"\"Compute pi to a specific precision using", "pi.\"\"\" from decimal import Decimal, getcontext import argparse import itertools", "// divisor if term == 0: break total += term", "argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision of pi", "//= 10**20 return '3.{}'.format(str(pi)[1:]) if __name__ == '__main__': parser =", "pi += term if term < Decimal(10)**(-precision-10): break pi =", "Machin's formula. 
Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" one = 10**(digits + 20)", "// x_squared_plus_1 total = term two_n = 2 while 1:", "return total @staticmethod def machin_euler(digits): \"\"\"Compute pi using Machin's formula.", "argparse import itertools class ComputePi: \"\"\"Compute pi to a specific", "+ 20) pi = 4*(4*ComputePi.arctan_euler(5, one) - ComputePi.arctan_euler(239, one)) pi", "specific precision using multiple algorithms.\"\"\" @staticmethod def BBP(precision): \"\"\"Compute pi", "+= term two_n += 2 return total @staticmethod def machin_euler(digits):", "@staticmethod def BBP(precision): \"\"\"Compute pi using the Bailey-Borwein-Plouffe formula.\"\"\" getcontext().prec", "euler's accelerated formula. Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" x_squared = x *", "return '3.{}'.format(str(pi)[1:]) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.')", "str(pi)[:-19] return pi @staticmethod def arctan_euler(x, one=1000000): \"\"\"Calculate arctan(1/x) using", "two_n = 2 while 1: divisor = (two_n+1) * x_squared_plus_1", "# round the division term = term // divisor if", "for k in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) -", "the Bailey-Borwein-Plouffe formula.\"\"\" getcontext().prec = precision + 20 pi =", "term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi @staticmethod", "* one) // x_squared_plus_1 total = term two_n = 2", "pi @staticmethod def arctan_euler(x, one=1000000): \"\"\"Calculate arctan(1/x) using euler's accelerated", "itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6))", "Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" one = 10**(digits + 20) pi =", "+= 2 return total @staticmethod def machin_euler(digits): \"\"\"Compute pi using", "10**20 return '3.{}'.format(str(pi)[1:]) if __name__ == '__main__': parser = 
argparse.ArgumentParser(description='Calculates", "+= term if term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19]", "return pi @staticmethod def arctan_euler(x, one=1000000): \"\"\"Calculate arctan(1/x) using euler's", "formula.\"\"\" getcontext().prec = precision + 20 pi = Decimal(0) for", "class ComputePi: \"\"\"Compute pi to a specific precision using multiple", "multiple algorithms.\"\"\" @staticmethod def BBP(precision): \"\"\"Compute pi using the Bailey-Borwein-Plouffe", "x_squared_plus_1 total = term two_n = 2 while 1: divisor", "2 while 1: divisor = (two_n+1) * x_squared_plus_1 term *=", "1 term = (x * one) // x_squared_plus_1 total =", "2 return total @staticmethod def machin_euler(digits): \"\"\"Compute pi using Machin's", "10**(digits + 20) pi = 4*(4*ComputePi.arctan_euler(5, one) - ComputePi.arctan_euler(239, one))", "term += divisor // 2 # round the division term", "divisor // 2 # round the division term = term", "Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term if", "arctan_euler(x, one=1000000): \"\"\"Calculate arctan(1/x) using euler's accelerated formula. Based on", "using Machin's formula. 
Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" one = 10**(digits +", "/= Decimal(16)**k pi += term if term < Decimal(10)**(-precision-10): break", "precision using multiple algorithms.\"\"\" @staticmethod def BBP(precision): \"\"\"Compute pi using", "BBP(precision): \"\"\"Compute pi using the Bailey-Borwein-Plouffe formula.\"\"\" getcontext().prec = precision", "pi = 4*(4*ComputePi.arctan_euler(5, one) - ComputePi.arctan_euler(239, one)) pi //= 10**20", "- Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi", "* x_squared_plus_1 term *= two_n term += divisor // 2", "import itertools class ComputePi: \"\"\"Compute pi to a specific precision", "(x * one) // x_squared_plus_1 total = term two_n =", "ComputePi.arctan_euler(239, one)) pi //= 10**20 return '3.{}'.format(str(pi)[1:]) if __name__ ==", "arctan(1/x) using euler's accelerated formula. Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" x_squared =", "pi using Machin's formula. 
Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" one = 10**(digits", "if term == 0: break total += term two_n +=", "in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) -", "using multiple algorithms.\"\"\" @staticmethod def BBP(precision): \"\"\"Compute pi using the", "term *= two_n term += divisor // 2 # round", "term if term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return", "= x * x x_squared_plus_1 = x_squared + 1 term", "= 2 while 1: divisor = (two_n+1) * x_squared_plus_1 term", "parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision", "__name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100,", "def BBP(precision): \"\"\"Compute pi using the Bailey-Borwein-Plouffe formula.\"\"\" getcontext().prec =", "accelerated formula. Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" x_squared = x * x", "\"\"\"Calculate arctan(1/x) using euler's accelerated formula. 
Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" x_squared", "precision of pi (default: 100 digits)') args = parser.parse_args() pi_computer", "'__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired", "default=100, help='The desired precision of pi (default: 100 digits)') args", "1: divisor = (two_n+1) * x_squared_plus_1 term *= two_n term", "pi = Decimal(0) for k in itertools.count(): term = (Decimal(4)/(8*k+1)", "= term // divisor if term == 0: break total", "while 1: divisor = (two_n+1) * x_squared_plus_1 term *= two_n", "division term = term // divisor if term == 0:", "total += term two_n += 2 return total @staticmethod def", "one)) pi //= 10**20 return '3.{}'.format(str(pi)[1:]) if __name__ == '__main__':", "total @staticmethod def machin_euler(digits): \"\"\"Compute pi using Machin's formula. Based", "< Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi @staticmethod def", "x_squared_plus_1 term *= two_n term += divisor // 2 #", "machin_euler(digits): \"\"\"Compute pi using Machin's formula. Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" one", "desired precision of pi (default: 100 digits)') args = parser.parse_args()", "a specific precision using multiple algorithms.\"\"\" @staticmethod def BBP(precision): \"\"\"Compute", "Decimal, getcontext import argparse import itertools class ComputePi: \"\"\"Compute pi", "+ 1 term = (x * one) // x_squared_plus_1 total", "+ 20 pi = Decimal(0) for k in itertools.count(): term", "k in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5)", "term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term", "one = 10**(digits + 20) pi = 4*(4*ComputePi.arctan_euler(5, one) -", "formula. 
Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" one = 10**(digits + 20) pi", "help='The desired precision of pi (default: 100 digits)') args =", "on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" x_squared = x * x x_squared_plus_1 = x_squared", "(default: 100 digits)') args = parser.parse_args() pi_computer = ComputePi() print(pi_computer.machin_euler(args.precision))", "Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term if term <", "break total += term two_n += 2 return total @staticmethod", "of pi (default: 100 digits)') args = parser.parse_args() pi_computer =", "2 # round the division term = term // divisor", "on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" one = 10**(digits + 20) pi = 4*(4*ComputePi.arctan_euler(5,", "parser.add_argument('--precision', type=int, default=100, help='The desired precision of pi (default: 100", "term two_n = 2 while 1: divisor = (two_n+1) *", "def machin_euler(digits): \"\"\"Compute pi using Machin's formula. Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\"", "= precision + 20 pi = Decimal(0) for k in", "== '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The", "(two_n+1) * x_squared_plus_1 term *= two_n term += divisor //", "two_n += 2 return total @staticmethod def machin_euler(digits): \"\"\"Compute pi", "= x_squared + 1 term = (x * one) //", "// 2 # round the division term = term //", "one=1000000): \"\"\"Calculate arctan(1/x) using euler's accelerated formula. 
Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\"", "decimal import Decimal, getcontext import argparse import itertools class ComputePi:", "+= divisor // 2 # round the division term =", "= 10**(digits + 20) pi = 4*(4*ComputePi.arctan_euler(5, one) - ComputePi.arctan_euler(239,", "algorithms.\"\"\" @staticmethod def BBP(precision): \"\"\"Compute pi using the Bailey-Borwein-Plouffe formula.\"\"\"", "<gh_stars>0 \"\"\"Compute pi.\"\"\" from decimal import Decimal, getcontext import argparse", "\"\"\"Compute pi to a specific precision using multiple algorithms.\"\"\" @staticmethod", "total = term two_n = 2 while 1: divisor =", "'3.{}'.format(str(pi)[1:]) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision',", "pi //= 10**20 return '3.{}'.format(str(pi)[1:]) if __name__ == '__main__': parser", "20 pi = Decimal(0) for k in itertools.count(): term =", "*= two_n term += divisor // 2 # round the", "round the division term = term // divisor if term", "term = (x * one) // x_squared_plus_1 total = term", "- Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term if term", "x_squared + 1 term = (x * one) // x_squared_plus_1", "term two_n += 2 return total @staticmethod def machin_euler(digits): \"\"\"Compute", "type=int, default=100, help='The desired precision of pi (default: 100 digits)')", "divisor = (two_n+1) * x_squared_plus_1 term *= two_n term +=", "pi using the Bailey-Borwein-Plouffe formula.\"\"\" getcontext().prec = precision + 20", "formula. 
Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" x_squared = x * x x_squared_plus_1", "term // divisor if term == 0: break total +=", "- Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term", "the division term = term // divisor if term ==", "using the Bailey-Borwein-Plouffe formula.\"\"\" getcontext().prec = precision + 20 pi", "= argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision of", "http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" one = 10**(digits + 20) pi = 4*(4*ComputePi.arctan_euler(5, one)", "break pi = str(pi)[:-19] return pi @staticmethod def arctan_euler(x, one=1000000):", "= str(pi)[:-19] return pi @staticmethod def arctan_euler(x, one=1000000): \"\"\"Calculate arctan(1/x)", "= (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /=", "4*(4*ComputePi.arctan_euler(5, one) - ComputePi.arctan_euler(239, one)) pi //= 10**20 return '3.{}'.format(str(pi)[1:])", "http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" x_squared = x * x x_squared_plus_1 = x_squared +", "== 0: break total += term two_n += 2 return", "from decimal import Decimal, getcontext import argparse import itertools class", "\"\"\"Compute pi using the Bailey-Borwein-Plouffe formula.\"\"\" getcontext().prec = precision +", "= (x * one) // x_squared_plus_1 total = term two_n", "pi = str(pi)[:-19] return pi @staticmethod def arctan_euler(x, one=1000000): \"\"\"Calculate", "term == 0: break total += term two_n += 2", "20) pi = 4*(4*ComputePi.arctan_euler(5, one) - ComputePi.arctan_euler(239, one)) pi //=", "- ComputePi.arctan_euler(239, one)) pi //= 10**20 return '3.{}'.format(str(pi)[1:]) if __name__", "(Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k", "Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi +=", "one) // 
x_squared_plus_1 total = term two_n = 2 while", "x_squared = x * x x_squared_plus_1 = x_squared + 1", "two_n term += divisor // 2 # round the division", "pi (default: 100 digits)') args = parser.parse_args() pi_computer = ComputePi()", "= Decimal(0) for k in itertools.count(): term = (Decimal(4)/(8*k+1) -", "if term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi", "= term two_n = 2 while 1: divisor = (two_n+1)", "\"\"\"Compute pi using Machin's formula. Based on http://www.craig-wood.com/nick/articles/pi-machin/\"\"\" one =", "0: break total += term two_n += 2 return total", "term /= Decimal(16)**k pi += term if term < Decimal(10)**(-precision-10):" ]
[ "= DataTypes.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # Optional, deploy the", "as the admin contract proxy_admin = ProxyAdmin.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"],", "#!/usr/bin/python3 import time from brownie import ( DataTypes, TransparentUpgradeableProxy, ProxyAdmin,", "10 ) proxy = TransparentUpgradeableProxy.deploy( data_types.address, proxy_admin.address, data_types_encoded_initializer_function, # gas", "publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # Optional, deploy the ProxyAdmin and use that", "use that as the admin contract proxy_admin = ProxyAdmin.deploy( {\"from\":", ") from scripts.helpful_scripts import get_account, encode_function_data def main(): account =", "the ProxyAdmin and use that as the admin contract proxy_admin", "to simulate the initializer being the `store` function # with", "account, \"gas_limit\": 100000000000}, {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) print(f\"Proxy deployed to", "You can now upgrade it to dataTypesV2!\") proxy_data_types = Contract.from_abi(\"DataTypes\",", "{\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) print(f\"Proxy deployed to {proxy} ! 
You", "Optional, deploy the ProxyAdmin and use that as the admin", "the `store` function # with a `newValue` of 1 #", "data_types_encoded_initializer_function = encode_function_data( data_types.setDataTypes, 10 ) proxy = TransparentUpgradeableProxy.deploy( data_types.address,", "publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # If we want an intializer function we", ") # Optional, deploy the ProxyAdmin and use that as", "time from brownie import ( DataTypes, TransparentUpgradeableProxy, ProxyAdmin, config, network,", "print(f\"Deploying to {network.show_active()}\") data_types = DataTypes.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], )", "account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # Optional, deploy the ProxyAdmin and use", "= TransparentUpgradeableProxy.deploy( data_types.address, proxy_admin.address, data_types_encoded_initializer_function, # gas limit removed fort", "from scripts.helpful_scripts import get_account, encode_function_data def main(): account = get_account()", "{network.show_active()}\") data_types = DataTypes.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # Optional,", "deploy the ProxyAdmin and use that as the admin contract", "initializer being the `store` function # with a `newValue` of", "{\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # Optional, deploy the ProxyAdmin and", "data_types.address, proxy_admin.address, data_types_encoded_initializer_function, # gas limit removed fort an issue", "! 
You can now upgrade it to dataTypesV2!\") proxy_data_types =", "add # `initializer=box.store, 1` # to simulate the initializer being", "# with a `newValue` of 1 # data_types_encoded_initializer_function = encode_function_data(data_types.setDataTypes)", "function # with a `newValue` of 1 # data_types_encoded_initializer_function =", "{proxy} ! You can now upgrade it to dataTypesV2!\") proxy_data_types", "from brownie import ( DataTypes, TransparentUpgradeableProxy, ProxyAdmin, config, network, Contract,", "1 # data_types_encoded_initializer_function = encode_function_data(data_types.setDataTypes) data_types_encoded_initializer_function = encode_function_data( data_types.setDataTypes, 10", "account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # If we want an intializer function", "# {\"from\": account, \"gas_limit\": 100000000000}, {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) print(f\"Proxy", "scripts.helpful_scripts import get_account, encode_function_data def main(): account = get_account() print(config[\"networks\"][network.show_active()])", "ProxyAdmin.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # If we want an", "DataTypes, TransparentUpgradeableProxy, ProxyAdmin, config, network, Contract, ) from scripts.helpful_scripts import", "= ProxyAdmin.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # If we want", "# data_types_encoded_initializer_function = encode_function_data(data_types.setDataTypes) data_types_encoded_initializer_function = encode_function_data( data_types.setDataTypes, 10 )", "can add # `initializer=box.store, 1` # to simulate the initializer", "100000000000}, {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) print(f\"Proxy deployed to {proxy} !", "we can add # `initializer=box.store, 1` # to simulate the", 
"brownie import ( DataTypes, TransparentUpgradeableProxy, ProxyAdmin, config, network, Contract, )", "data_types_encoded_initializer_function = encode_function_data(data_types.setDataTypes) data_types_encoded_initializer_function = encode_function_data( data_types.setDataTypes, 10 ) proxy", "proxy = TransparentUpgradeableProxy.deploy( data_types.address, proxy_admin.address, data_types_encoded_initializer_function, # gas limit removed", ") # If we want an intializer function we can", "encode_function_data( data_types.setDataTypes, 10 ) proxy = TransparentUpgradeableProxy.deploy( data_types.address, proxy_admin.address, data_types_encoded_initializer_function,", "an intializer function we can add # `initializer=box.store, 1` #", "If we want an intializer function we can add #", "network, Contract, ) from scripts.helpful_scripts import get_account, encode_function_data def main():", "the initializer being the `store` function # with a `newValue`", "Contract, ) from scripts.helpful_scripts import get_account, encode_function_data def main(): account", ") proxy = TransparentUpgradeableProxy.deploy( data_types.address, proxy_admin.address, data_types_encoded_initializer_function, # gas limit", "simulate the initializer being the `store` function # with a", "proxy_admin = ProxyAdmin.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # If we", "publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) print(f\"Proxy deployed to {proxy} ! 
You can now", "very clear # {\"from\": account, \"gas_limit\": 100000000000}, {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"],", "with a `newValue` of 1 # data_types_encoded_initializer_function = encode_function_data(data_types.setDataTypes) data_types_encoded_initializer_function", "config, network, Contract, ) from scripts.helpful_scripts import get_account, encode_function_data def", "data_types_encoded_initializer_function, # gas limit removed fort an issue not very", "intializer function we can add # `initializer=box.store, 1` # to", "want an intializer function we can add # `initializer=box.store, 1`", "function we can add # `initializer=box.store, 1` # to simulate", "= encode_function_data( data_types.setDataTypes, 10 ) proxy = TransparentUpgradeableProxy.deploy( data_types.address, proxy_admin.address,", "= encode_function_data(data_types.setDataTypes) data_types_encoded_initializer_function = encode_function_data( data_types.setDataTypes, 10 ) proxy =", "that as the admin contract proxy_admin = ProxyAdmin.deploy( {\"from\": account},", "not very clear # {\"from\": account, \"gas_limit\": 100000000000}, {\"from\": account},", "# If we want an intializer function we can add", "deployed to {proxy} ! 
You can now upgrade it to", "get_account() print(config[\"networks\"][network.show_active()]) print(f\"Deploying to {network.show_active()}\") data_types = DataTypes.deploy( {\"from\": account},", "issue not very clear # {\"from\": account, \"gas_limit\": 100000000000}, {\"from\":", "being the `store` function # with a `newValue` of 1", "= get_account() print(config[\"networks\"][network.show_active()]) print(f\"Deploying to {network.show_active()}\") data_types = DataTypes.deploy( {\"from\":", "we want an intializer function we can add # `initializer=box.store,", "`store` function # with a `newValue` of 1 # data_types_encoded_initializer_function", "`newValue` of 1 # data_types_encoded_initializer_function = encode_function_data(data_types.setDataTypes) data_types_encoded_initializer_function = encode_function_data(", "print(config[\"networks\"][network.show_active()]) print(f\"Deploying to {network.show_active()}\") data_types = DataTypes.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"],", "encode_function_data(data_types.setDataTypes) data_types_encoded_initializer_function = encode_function_data( data_types.setDataTypes, 10 ) proxy = TransparentUpgradeableProxy.deploy(", "DataTypes.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # Optional, deploy the ProxyAdmin", "and use that as the admin contract proxy_admin = ProxyAdmin.deploy(", "an issue not very clear # {\"from\": account, \"gas_limit\": 100000000000},", "def main(): account = get_account() print(config[\"networks\"][network.show_active()]) print(f\"Deploying to {network.show_active()}\") data_types", "TransparentUpgradeableProxy, ProxyAdmin, config, network, Contract, ) from scripts.helpful_scripts import get_account,", ") print(f\"Proxy deployed to {proxy} ! 
You can now upgrade", "to {network.show_active()}\") data_types = DataTypes.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) #", "ProxyAdmin and use that as the admin contract proxy_admin =", "1` # to simulate the initializer being the `store` function", "gas limit removed fort an issue not very clear #", "now upgrade it to dataTypesV2!\") proxy_data_types = Contract.from_abi(\"DataTypes\", proxy.address, DataTypes.abi)", "# to simulate the initializer being the `store` function #", "# Optional, deploy the ProxyAdmin and use that as the", "# gas limit removed fort an issue not very clear", "{\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # If we want an intializer", "limit removed fort an issue not very clear # {\"from\":", "\"gas_limit\": 100000000000}, {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) print(f\"Proxy deployed to {proxy}", "import ( DataTypes, TransparentUpgradeableProxy, ProxyAdmin, config, network, Contract, ) from", "fort an issue not very clear # {\"from\": account, \"gas_limit\":", "`initializer=box.store, 1` # to simulate the initializer being the `store`", "( DataTypes, TransparentUpgradeableProxy, ProxyAdmin, config, network, Contract, ) from scripts.helpful_scripts", "import get_account, encode_function_data def main(): account = get_account() print(config[\"networks\"][network.show_active()]) print(f\"Deploying", "removed fort an issue not very clear # {\"from\": account,", "data_types = DataTypes.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # Optional, deploy", "clear # {\"from\": account, \"gas_limit\": 100000000000}, {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], )", "import time from brownie import ( DataTypes, TransparentUpgradeableProxy, ProxyAdmin, config,", "# `initializer=box.store, 
1` # to simulate the initializer being the", "account = get_account() print(config[\"networks\"][network.show_active()]) print(f\"Deploying to {network.show_active()}\") data_types = DataTypes.deploy(", "print(f\"Proxy deployed to {proxy} ! You can now upgrade it", "the admin contract proxy_admin = ProxyAdmin.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], )", "ProxyAdmin, config, network, Contract, ) from scripts.helpful_scripts import get_account, encode_function_data", "get_account, encode_function_data def main(): account = get_account() print(config[\"networks\"][network.show_active()]) print(f\"Deploying to", "account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) print(f\"Proxy deployed to {proxy} ! You can", "a `newValue` of 1 # data_types_encoded_initializer_function = encode_function_data(data_types.setDataTypes) data_types_encoded_initializer_function =", "data_types.setDataTypes, 10 ) proxy = TransparentUpgradeableProxy.deploy( data_types.address, proxy_admin.address, data_types_encoded_initializer_function, #", "{\"from\": account, \"gas_limit\": 100000000000}, {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) print(f\"Proxy deployed", "to {proxy} ! 
You can now upgrade it to dataTypesV2!\")", "contract proxy_admin = ProxyAdmin.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) # If", "can now upgrade it to dataTypesV2!\") proxy_data_types = Contract.from_abi(\"DataTypes\", proxy.address,", "admin contract proxy_admin = ProxyAdmin.deploy( {\"from\": account}, publish_source=config[\"networks\"][network.show_active()][\"verify\"], ) #", "TransparentUpgradeableProxy.deploy( data_types.address, proxy_admin.address, data_types_encoded_initializer_function, # gas limit removed fort an", "encode_function_data def main(): account = get_account() print(config[\"networks\"][network.show_active()]) print(f\"Deploying to {network.show_active()}\")", "proxy_admin.address, data_types_encoded_initializer_function, # gas limit removed fort an issue not", "of 1 # data_types_encoded_initializer_function = encode_function_data(data_types.setDataTypes) data_types_encoded_initializer_function = encode_function_data( data_types.setDataTypes,", "main(): account = get_account() print(config[\"networks\"][network.show_active()]) print(f\"Deploying to {network.show_active()}\") data_types =" ]
[ "recurrent.view(T * b, h) output = self.embedding(t_rec) # [T *", "* b, h) output = self.embedding(t_rec) # [T * b,", "convolutional feature map def __init__(self, nIn, nHidden, nOut): super(BidirectionalLSTM, self).__init__()", "nHidden, nOut): super(BidirectionalLSTM, self).__init__() self.rnn = nn.LSTM(nIn, nHidden, bidirectional=True) self.embedding", "* 2, nOut) self.rnn.cuda() self.embedding.cuda() def forward(self, input): recurrent, _", "nn class BidirectionalLSTM(nn.Module): # Module to extract BLSTM features from", "_ = self.rnn(input) T, b, h = recurrent.size() t_rec =", "self.rnn.cuda() self.embedding.cuda() def forward(self, input): recurrent, _ = self.rnn(input) T,", "= recurrent.view(T * b, h) output = self.embedding(t_rec) # [T", "b, h = recurrent.size() t_rec = recurrent.view(T * b, h)", "class BidirectionalLSTM(nn.Module): # Module to extract BLSTM features from convolutional", "__init__(self, nIn, nHidden, nOut): super(BidirectionalLSTM, self).__init__() self.rnn = nn.LSTM(nIn, nHidden,", "feature map def __init__(self, nIn, nHidden, nOut): super(BidirectionalLSTM, self).__init__() self.rnn", "forward(self, input): recurrent, _ = self.rnn(input) T, b, h =", "input): recurrent, _ = self.rnn(input) T, b, h = recurrent.size()", "[T * b, nOut] output = output.view(T, b, -1) return", "h) output = self.embedding(t_rec) # [T * b, nOut] output", "<filename>modules/BidirectionalLSTM.py import torch.nn as nn class BidirectionalLSTM(nn.Module): # Module to", "to extract BLSTM features from convolutional feature map def __init__(self,", "nIn, nHidden, nOut): super(BidirectionalLSTM, self).__init__() self.rnn = nn.LSTM(nIn, nHidden, bidirectional=True)", "BidirectionalLSTM(nn.Module): # Module to extract BLSTM features from convolutional feature", "self.embedding.cuda() def forward(self, input): recurrent, _ = self.rnn(input) T, b,", "def __init__(self, nIn, nHidden, nOut): super(BidirectionalLSTM, self).__init__() self.rnn = nn.LSTM(nIn,", "b, h) output = 
self.embedding(t_rec) # [T * b, nOut]", "super(BidirectionalLSTM, self).__init__() self.rnn = nn.LSTM(nIn, nHidden, bidirectional=True) self.embedding = nn.Linear(nHidden", "# Module to extract BLSTM features from convolutional feature map", "self.embedding = nn.Linear(nHidden * 2, nOut) self.rnn.cuda() self.embedding.cuda() def forward(self,", "* b, nOut] output = output.view(T, b, -1) return output", "from convolutional feature map def __init__(self, nIn, nHidden, nOut): super(BidirectionalLSTM,", "recurrent, _ = self.rnn(input) T, b, h = recurrent.size() t_rec", "= nn.Linear(nHidden * 2, nOut) self.rnn.cuda() self.embedding.cuda() def forward(self, input):", "= self.embedding(t_rec) # [T * b, nOut] output = output.view(T,", "# [T * b, nOut] output = output.view(T, b, -1)", "def forward(self, input): recurrent, _ = self.rnn(input) T, b, h", "nOut): super(BidirectionalLSTM, self).__init__() self.rnn = nn.LSTM(nIn, nHidden, bidirectional=True) self.embedding =", "nn.LSTM(nIn, nHidden, bidirectional=True) self.embedding = nn.Linear(nHidden * 2, nOut) self.rnn.cuda()", "Module to extract BLSTM features from convolutional feature map def", "recurrent.size() t_rec = recurrent.view(T * b, h) output = self.embedding(t_rec)", "BLSTM features from convolutional feature map def __init__(self, nIn, nHidden,", "nHidden, bidirectional=True) self.embedding = nn.Linear(nHidden * 2, nOut) self.rnn.cuda() self.embedding.cuda()", "output = self.embedding(t_rec) # [T * b, nOut] output =", "bidirectional=True) self.embedding = nn.Linear(nHidden * 2, nOut) self.rnn.cuda() self.embedding.cuda() def", "2, nOut) self.rnn.cuda() self.embedding.cuda() def forward(self, input): recurrent, _ =", "self.embedding(t_rec) # [T * b, nOut] output = output.view(T, b,", "self.rnn(input) T, b, h = recurrent.size() t_rec = recurrent.view(T *", "map def __init__(self, nIn, nHidden, nOut): super(BidirectionalLSTM, self).__init__() self.rnn =", "t_rec = recurrent.view(T * b, h) output = 
self.embedding(t_rec) #", "self.rnn = nn.LSTM(nIn, nHidden, bidirectional=True) self.embedding = nn.Linear(nHidden * 2,", "torch.nn as nn class BidirectionalLSTM(nn.Module): # Module to extract BLSTM", "extract BLSTM features from convolutional feature map def __init__(self, nIn,", "= nn.LSTM(nIn, nHidden, bidirectional=True) self.embedding = nn.Linear(nHidden * 2, nOut)", "h = recurrent.size() t_rec = recurrent.view(T * b, h) output", "self).__init__() self.rnn = nn.LSTM(nIn, nHidden, bidirectional=True) self.embedding = nn.Linear(nHidden *", "import torch.nn as nn class BidirectionalLSTM(nn.Module): # Module to extract", "nn.Linear(nHidden * 2, nOut) self.rnn.cuda() self.embedding.cuda() def forward(self, input): recurrent,", "= self.rnn(input) T, b, h = recurrent.size() t_rec = recurrent.view(T", "T, b, h = recurrent.size() t_rec = recurrent.view(T * b,", "as nn class BidirectionalLSTM(nn.Module): # Module to extract BLSTM features", "= recurrent.size() t_rec = recurrent.view(T * b, h) output =", "nOut) self.rnn.cuda() self.embedding.cuda() def forward(self, input): recurrent, _ = self.rnn(input)", "features from convolutional feature map def __init__(self, nIn, nHidden, nOut):" ]
[ "for signature \"\"\" pass @staticmethod def __new__(self,graphics,clipRect): \"\"\" __new__(cls: type,graphics:", "Graphics=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets the graphics", "the graphics used to paint. Get: Graphics(self: PaintEventArgs) -> Graphics", "Dispose(self: PaintEventArgs) Releases all resources used by the System.Windows.Forms.PaintEventArgs. \"\"\"", "__enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object \"\"\" pass def __exit__(self,*args):", "data for the System.Windows.Forms.Control.Paint event. PaintEventArgs(graphics: Graphics,clipRect: Rectangle) \"\"\" def", "self: None) \"\"\"Gets the rectangle in which to paint. Get:", "\"\"\" pass def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object)", "\"\"\" __new__(cls: type,graphics: Graphics,clipRect: Rectangle) \"\"\" pass ClipRectangle=property(lambda self: object(),lambda", "graphics used to paint. Get: Graphics(self: PaintEventArgs) -> Graphics \"\"\"", "rectangle in which to paint. Get: ClipRectangle(self: PaintEventArgs) -> Rectangle", "Get: ClipRectangle(self: PaintEventArgs) -> Rectangle \"\"\" Graphics=property(lambda self: object(),lambda self,v:", "x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\"", "__exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\" pass def __init__(self,*args): \"\"\"", "def Dispose(self): \"\"\" Dispose(self: PaintEventArgs) Releases all resources used by", "__exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\" pass def", "PaintEventArgs(EventArgs,IDisposable): \"\"\" Provides data for the System.Windows.Forms.Control.Paint event. PaintEventArgs(graphics: Graphics,clipRect:", "__init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes", "\"\"\" Graphics=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets the", "has been arbitrarily put into the stubs\"\"\" return PaintEventArgs() def", "used by the System.Windows.Forms.PaintEventArgs. \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self:", "-> Rectangle \"\"\" Graphics=property(lambda self: object(),lambda self,v: None,lambda self: None)", "initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__", "Rectangle) \"\"\" def Instance(self): \"\"\" This function has been arbitrarily", "Rectangle) \"\"\" pass ClipRectangle=property(lambda self: object(),lambda self,v: None,lambda self: None)", "Dispose(self): \"\"\" Dispose(self: PaintEventArgs) Releases all resources used by the", "put into the stubs\"\"\" return PaintEventArgs() def Dispose(self): \"\"\" Dispose(self:", "object(),lambda self,v: None,lambda self: None) \"\"\"Gets the rectangle in which", "Rectangle \"\"\" Graphics=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets", "pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object \"\"\" pass", "the stubs\"\"\" return PaintEventArgs() def Dispose(self): \"\"\" Dispose(self: PaintEventArgs) Releases", "def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object \"\"\" pass def", "None,lambda self: None) \"\"\"Gets the rectangle in which to paint.", "arbitrarily put into the stubs\"\"\" return PaintEventArgs() def Dispose(self): \"\"\"", "class PaintEventArgs(EventArgs,IDisposable): \"\"\" Provides data for the System.Windows.Forms.Control.Paint event. PaintEventArgs(graphics:", "None) \"\"\"Gets the rectangle in which to paint. Get: ClipRectangle(self:", "see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature", "object,exc_value: object,exc_back: object) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) 
initializes", "for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x;", "signature \"\"\" pass @staticmethod def __new__(self,graphics,clipRect): \"\"\" __new__(cls: type,graphics: Graphics,clipRect:", "IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...)", "paint. Get: ClipRectangle(self: PaintEventArgs) -> Rectangle \"\"\" Graphics=property(lambda self: object(),lambda", "PaintEventArgs) -> Rectangle \"\"\" Graphics=property(lambda self: object(),lambda self,v: None,lambda self:", "function has been arbitrarily put into the stubs\"\"\" return PaintEventArgs()", "\"\"\" This function has been arbitrarily put into the stubs\"\"\"", "\"\"\" Dispose(self: PaintEventArgs) Releases all resources used by the System.Windows.Forms.PaintEventArgs.", "\"\"\" pass @staticmethod def __new__(self,graphics,clipRect): \"\"\" __new__(cls: type,graphics: Graphics,clipRect: Rectangle)", "initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def", "x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes", "\"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x;", "__new__(self,graphics,clipRect): \"\"\" __new__(cls: type,graphics: Graphics,clipRect: Rectangle) \"\"\" pass ClipRectangle=property(lambda self:", "the System.Windows.Forms.PaintEventArgs. \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) ->", "def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\" pass", "Provides data for the System.Windows.Forms.Control.Paint event. 
PaintEventArgs(graphics: Graphics,clipRect: Rectangle) \"\"\"", "PaintEventArgs(graphics: Graphics,clipRect: Rectangle) \"\"\" def Instance(self): \"\"\" This function has", "\"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object \"\"\"", "pass def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\"", "object,exc_back: object) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x;", "been arbitrarily put into the stubs\"\"\" return PaintEventArgs() def Dispose(self):", "x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see", "to paint. Get: ClipRectangle(self: PaintEventArgs) -> Rectangle \"\"\" Graphics=property(lambda self:", "System.Windows.Forms.Control.Paint event. PaintEventArgs(graphics: Graphics,clipRect: Rectangle) \"\"\" def Instance(self): \"\"\" This", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets the rectangle in", "IDisposable) -> object \"\"\" pass def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type:", "@staticmethod def __new__(self,graphics,clipRect): \"\"\" __new__(cls: type,graphics: Graphics,clipRect: Rectangle) \"\"\" pass", "def __new__(self,graphics,clipRect): \"\"\" __new__(cls: type,graphics: Graphics,clipRect: Rectangle) \"\"\" pass ClipRectangle=property(lambda", "x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,graphics,clipRect): \"\"\" __new__(cls:", "return PaintEventArgs() def Dispose(self): \"\"\" Dispose(self: PaintEventArgs) Releases all resources", "for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature \"\"\" pass", "PaintEventArgs) Releases all resources used by the System.Windows.Forms.PaintEventArgs. \"\"\" pass", "None,lambda self: None) \"\"\"Gets the graphics used to paint. Get:", "see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "object(),lambda self,v: None,lambda self: None) \"\"\"Gets the graphics used to", "for the System.Windows.Forms.Control.Paint event. PaintEventArgs(graphics: Graphics,clipRect: Rectangle) \"\"\" def Instance(self):", "System.Windows.Forms.PaintEventArgs. \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable) -> object", "__enter__(self: IDisposable) -> object \"\"\" pass def __exit__(self,*args): \"\"\" __exit__(self:", "which to paint. Get: ClipRectangle(self: PaintEventArgs) -> Rectangle \"\"\" Graphics=property(lambda", "This function has been arbitrarily put into the stubs\"\"\" return", "stubs\"\"\" return PaintEventArgs() def Dispose(self): \"\"\" Dispose(self: PaintEventArgs) Releases all", "in which to paint. Get: ClipRectangle(self: PaintEventArgs) -> Rectangle \"\"\"", "into the stubs\"\"\" return PaintEventArgs() def Dispose(self): \"\"\" Dispose(self: PaintEventArgs)", "Releases all resources used by the System.Windows.Forms.PaintEventArgs. \"\"\" pass def", "\"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) \"\"\" pass def __init__(self,*args):", "self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets the graphics used", "None) \"\"\"Gets the graphics used to paint. Get: Graphics(self: PaintEventArgs)", "resources used by the System.Windows.Forms.PaintEventArgs. \"\"\" pass def __enter__(self,*args): \"\"\"", "__new__(cls: type,graphics: Graphics,clipRect: Rectangle) \"\"\" pass ClipRectangle=property(lambda self: object(),lambda self,v:", "signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see", "signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod", "\"\"\" def Instance(self): \"\"\" This function has been arbitrarily put", "self,v: None,lambda self: None) \"\"\"Gets the rectangle in which to", "object) \"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see", "def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...)", "\"\"\" __enter__(self: IDisposable) -> object \"\"\" pass def __exit__(self,*args): \"\"\"", "Graphics,clipRect: Rectangle) \"\"\" pass ClipRectangle=property(lambda self: object(),lambda self,v: None,lambda self:", "by the System.Windows.Forms.PaintEventArgs. \"\"\" pass def __enter__(self,*args): \"\"\" __enter__(self: IDisposable)", "pass @staticmethod def __new__(self,graphics,clipRect): \"\"\" __new__(cls: type,graphics: Graphics,clipRect: Rectangle) \"\"\"", "the System.Windows.Forms.Control.Paint event. PaintEventArgs(graphics: Graphics,clipRect: Rectangle) \"\"\" def Instance(self): \"\"\"", "pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__ for", "self: None) \"\"\"Gets the graphics used to paint. Get: Graphics(self:", "Instance(self): \"\"\" This function has been arbitrarily put into the", "ClipRectangle=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets the rectangle", "Graphics,clipRect: Rectangle) \"\"\" def Instance(self): \"\"\" This function has been", "\"\"\"Gets the rectangle in which to paint. Get: ClipRectangle(self: PaintEventArgs)", "see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,graphics,clipRect): \"\"\"", "ClipRectangle(self: PaintEventArgs) -> Rectangle \"\"\" Graphics=property(lambda self: object(),lambda self,v: None,lambda", "pass ClipRectangle=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets the", "event. 
PaintEventArgs(graphics: Graphics,clipRect: Rectangle) \"\"\" def Instance(self): \"\"\" This function", "-> object \"\"\" pass def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value:", "the rectangle in which to paint. Get: ClipRectangle(self: PaintEventArgs) ->", "type,graphics: Graphics,clipRect: Rectangle) \"\"\" pass ClipRectangle=property(lambda self: object(),lambda self,v: None,lambda", "def Instance(self): \"\"\" This function has been arbitrarily put into", "self,v: None,lambda self: None) \"\"\"Gets the graphics used to paint.", "all resources used by the System.Windows.Forms.PaintEventArgs. \"\"\" pass def __enter__(self,*args):", "\"\"\"Gets the graphics used to paint. Get: Graphics(self: PaintEventArgs) ->", "\"\"\" pass ClipRectangle=property(lambda self: object(),lambda self,v: None,lambda self: None) \"\"\"Gets", "x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for", "x; see x.__class__.__doc__ for signature \"\"\" pass @staticmethod def __new__(self,graphics,clipRect):", "\"\"\" Provides data for the System.Windows.Forms.Control.Paint event. PaintEventArgs(graphics: Graphics,clipRect: Rectangle)", "object \"\"\" pass def __exit__(self,*args): \"\"\" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back:", "\"\"\" pass def __init__(self,*args): \"\"\" x.__init__(...) initializes x; see x.__class__.__doc__", "PaintEventArgs() def Dispose(self): \"\"\" Dispose(self: PaintEventArgs) Releases all resources used" ]
[ "# ================== TRAINING SETTINGS ================== # import argparse import os", "'pin_memory': opt.pin_memory} if use_cuda else {} train_dataloader = DataLoader(dataset_train, batch_size=opt.batch_size,", "load_dataset(opt, train=True) print('training data size: {}'.format(len(dataset_train))) print('validation data size: {}'.format(len(dataset_validation)))", "for multi-GPU usage') parser.add_argument('--pin_memory', default=True, type=bool, help='pin memory option selector')", "opt.no_cuda and torch.cuda.is_available() os.environ[\"CUDA_DEVICE_ORDER\"] =\"PCI_BUS_ID\" if opt.multi_gpu != 0: print()", "================== GPU SETTINGS ================== # def gpu_setup(opt): use_cuda = not", "# training parameter setting parser.add_argument('--n_epoch', default=10, type=int, help='number of total", "= torch.device('cuda' if torch.cuda.is_available() else 'cpu') else: print() print('Activating single-gpu", "else 'cpu') else: print() print('Activating single-gpu training mode') os.environ['CUDA_VISIBLE_DEVICES'] =", "torch from torch.utils.data import DataLoader torch.manual_seed(opt.seed) # ================== GPU SETTINGS", "default='CNN', type=str, help='model to use') parser.add_argument('--seed', default=42, type=int, help='random seed", "batch_size=opt.test_batch_size, shuffle=True, **kwargs) model = load_model(opt) if opt.multi_gpu != 0:", "of total training iteration') parser.add_argument('--batch_size', default=32, type=int, help='size of minibatch')", "SETTINGS ================== # def gpu_setup(opt): use_cuda = not opt.no_cuda and", "written by <NAME> # version 0.1 # ================== IMPORT CUSTOM", "main(opt): use_cuda = gpu_setup(opt) dataset_train, dataset_validation = load_dataset(opt, train=True) print('training", "default='default', type=str, help='choose the data transform type') # training parameter", "default=32, type=int, help='size of minibatch') parser.add_argument('--test_batch_size', default=32, type=int, help='size of", "use') 
parser.add_argument('--seed', default=42, type=int, help='random seed (default: 42)') parser.add_argument('--num_worker', default=1,", "default='mnist', type=str, help='dataset to use') parser.add_argument('--model', default='CNN', type=str, help='model to", "default='steplr', type=str, help='scheduler select') opt = parser.parse_args() # ===================== IMPORT", "= {'num_workers': opt.num_worker, 'pin_memory': opt.pin_memory} if use_cuda else {} train_dataloader", "================== # import torch from torch.utils.data import DataLoader torch.manual_seed(opt.seed) #", "default=1, type=int, help='number of dataloader worker') parser.add_argument('--no_cuda', action='store_true', default=False, help='disables", "parser.add_argument('--lr', default=0.03, type=float, help='training learning rate') parser.add_argument('--optimizer', default='adam', type=str, help='optimizer", "str(opt.gpu) opt.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') print('Using gpu", "to use') parser.add_argument('--model', default='CNN', type=str, help='model to use') parser.add_argument('--seed', default=42,", "================== TRAINING SETTINGS ================== # import argparse import os parser", "type=str, help='model to use') parser.add_argument('--seed', default=42, type=int, help='random seed (default:", "for the validation data') parser.add_argument('--transform', default='default', type=str, help='choose the data", "import os parser = argparse.ArgumentParser() parser.add_argument('--train_method', default='supervised', type=str, help='type of", "{}'.format(len(dataset_validation))) dataset_test = load_dataset(opt, train=False) print('test data size: {}'.format(len(dataset_test))) print()", "save Logs') # data setting parser.add_argument('--val_rate', default=0.2, type=float, help='split rate", "================== # import argparse import os parser = argparse.ArgumentParser() parser.add_argument('--train_method',", "DataLoader 
torch.manual_seed(opt.seed) # ================== GPU SETTINGS ================== # def gpu_setup(opt):", "use_cuda = gpu_setup(opt) dataset_train, dataset_validation = load_dataset(opt, train=True) print('training data", "def main(opt): use_cuda = gpu_setup(opt) dataset_train, dataset_validation = load_dataset(opt, train=True)", "SCRIPT ============================= # def main(opt): use_cuda = gpu_setup(opt) dataset_train, dataset_validation", "type=int, help='random seed (default: 42)') parser.add_argument('--num_worker', default=1, type=int, help='number of", "help='choose the data transform type') # training parameter setting parser.add_argument('--n_epoch',", "os parser = argparse.ArgumentParser() parser.add_argument('--train_method', default='supervised', type=str, help='type of training:", "iteration') parser.add_argument('--batch_size', default=32, type=int, help='size of minibatch') parser.add_argument('--test_batch_size', default=32, type=int,", "help='model to use') parser.add_argument('--seed', default=42, type=int, help='random seed (default: 42)')", "the data transform type') # training parameter setting parser.add_argument('--n_epoch', default=10,", "type=int, help='size of test-minibatch') # optimizer & scheduler setting parser.add_argument('--lr',", "shuffle=True, **kwargs) test_dataloader = DataLoader(dataset_test, batch_size=opt.test_batch_size, shuffle=True, **kwargs) model =", "default='adam', type=str, help='optimizer select') parser.add_argument('--scheduler', default='steplr', type=str, help='scheduler select') opt", "transform type') # training parameter setting parser.add_argument('--n_epoch', default=10, type=int, help='number", "kwargs = {'num_workers': opt.num_worker, 'pin_memory': opt.pin_memory} if use_cuda else {}", "os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.gpu) opt.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')", "torch.device('cuda' if torch.cuda.is_available() else 'cpu') else: print() print('Activating 
single-gpu training", "load_model # ================== TRAINING SETTINGS ================== # import argparse import", "to save weights') parser.add_argument('--log_path', default=os.getcwd()+'/Logs', type=str, help='Where to save Logs')", "===================== # from customs.train import train, test from customs.dataset import", "parser.add_argument('--test_batch_size', default=32, type=int, help='size of test-minibatch') # optimizer & scheduler", "# version 0.1 # ================== IMPORT CUSTOM LEARNING LIBRARIES =====================", "and torch.cuda.is_available() os.environ[\"CUDA_DEVICE_ORDER\"] =\"PCI_BUS_ID\" if opt.multi_gpu != 0: print() print('Activating", "import torch from torch.utils.data import DataLoader torch.manual_seed(opt.seed) # ================== GPU", "mode') os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.gpu) opt.device = torch.device('cuda' if torch.cuda.is_available() else", "argparse import os parser = argparse.ArgumentParser() parser.add_argument('--train_method', default='supervised', type=str, help='type", "<NAME> # version 0.1 # ================== IMPORT CUSTOM LEARNING LIBRARIES", "help='size of test-minibatch') # optimizer & scheduler setting parser.add_argument('--lr', default=0.03,", "**kwargs) test_dataloader = DataLoader(dataset_test, batch_size=opt.test_batch_size, shuffle=True, **kwargs) model = load_model(opt)", "not opt.no_cuda and torch.cuda.is_available() os.environ[\"CUDA_DEVICE_ORDER\"] =\"PCI_BUS_ID\" if opt.multi_gpu != 0:", "import load_model # ================== TRAINING SETTINGS ================== # import argparse", "data') parser.add_argument('--transform', default='default', type=str, help='choose the data transform type') #", "else {} train_dataloader = DataLoader(dataset_train, batch_size=opt.batch_size, shuffle=True, **kwargs) validation_dataloader =", "multi-gpu training mode') print(opt.multi_gpu) os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.multi_gpu) opt.device = torch.device('cuda'", "type=str, 
help='optimizer select') parser.add_argument('--scheduler', default='steplr', type=str, help='scheduler select') opt =", "Saving the current Model') parser.add_argument('--save_path', default=os.getcwd()+'/weights', type=str, help='Where to save", "' + str(opt.gpu)) return use_cuda # ======================= MAIN SCRIPT =============================", "optimizer & scheduler setting parser.add_argument('--lr', default=0.03, type=float, help='training learning rate')", "parser.add_argument('--model', default='CNN', type=str, help='model to use') parser.add_argument('--seed', default=42, type=int, help='random", "DataLoader(dataset_train, batch_size=opt.batch_size, shuffle=True, **kwargs) validation_dataloader = DataLoader(dataset_validation, batch_size=opt.batch_size, shuffle=True, **kwargs)", "help='For Saving the current Model') parser.add_argument('--save_path', default=os.getcwd()+'/weights', type=str, help='Where to", "parser.add_argument('--log_path', default=os.getcwd()+'/Logs', type=str, help='Where to save Logs') # data setting", "================== IMPORT CUSTOM LEARNING LIBRARIES ===================== # from customs.train import", "to use') parser.add_argument('--multi_gpu', default=0, type=str, help='GPU-ids for multi-GPU usage') parser.add_argument('--pin_memory',", "def gpu_setup(opt): use_cuda = not opt.no_cuda and torch.cuda.is_available() os.environ[\"CUDA_DEVICE_ORDER\"] =\"PCI_BUS_ID\"", "torch.device('cuda' if torch.cuda.is_available() else 'cpu') print('Using gpu number ' +", "# def main(opt): use_cuda = gpu_setup(opt) dataset_train, dataset_validation = load_dataset(opt,", "train, test from customs.dataset import load_dataset from customs.model import load_model", "print() print('Activating multi-gpu training mode') print(opt.multi_gpu) os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.multi_gpu) opt.device", "of minibatch') parser.add_argument('--test_batch_size', default=32, type=int, help='size of test-minibatch') # optimizer", "select') opt = 
parser.parse_args() # ===================== IMPORT PYTORCH LIBRARIES ==================", "shuffle=True, **kwargs) validation_dataloader = DataLoader(dataset_validation, batch_size=opt.batch_size, shuffle=True, **kwargs) test_dataloader =", "customs.model import load_model # ================== TRAINING SETTINGS ================== # import", "DataLoader(dataset_test, batch_size=opt.test_batch_size, shuffle=True, **kwargs) model = load_model(opt) if opt.multi_gpu !=", "supervised(default), unsupervised, reinforce') parser.add_argument('--task', default='classification', type=str, help='task of training: classification(default),", "of training: supervised(default), unsupervised, reinforce') parser.add_argument('--task', default='classification', type=str, help='task of", "type=str, help='GPU-id for GPU to use') parser.add_argument('--multi_gpu', default=0, type=str, help='GPU-ids", "customs.dataset import load_dataset from customs.model import load_model # ================== TRAINING", "= DataLoader(dataset_test, batch_size=opt.test_batch_size, shuffle=True, **kwargs) model = load_model(opt) if opt.multi_gpu", "memory option selector') parser.add_argument('--save_model', action='store_true', default=False, help='For Saving the current", "parser.add_argument('--dataset', default='mnist', type=str, help='dataset to use') parser.add_argument('--model', default='CNN', type=str, help='model", "LEARNING LIBRARIES ===================== # from customs.train import train, test from", "'cpu') print('Using gpu number ' + str(opt.gpu)) return use_cuda #", "type=str, help='Where to save Logs') # data setting parser.add_argument('--val_rate', default=0.2,", "total training iteration') parser.add_argument('--batch_size', default=32, type=int, help='size of minibatch') parser.add_argument('--test_batch_size',", "**kwargs) validation_dataloader = DataLoader(dataset_validation, batch_size=opt.batch_size, shuffle=True, **kwargs) test_dataloader = DataLoader(dataset_test,", "to use') 
parser.add_argument('--seed', default=42, type=int, help='random seed (default: 42)') parser.add_argument('--num_worker',", "= torch.device('cuda' if torch.cuda.is_available() else 'cpu') print('Using gpu number '", "load_model(opt) if opt.multi_gpu != 0: model = torch.nn.DataParallel(model) model.to(opt.device) train(opt,", "type=int, help='number of total training iteration') parser.add_argument('--batch_size', default=32, type=int, help='size", "type=float, help='training learning rate') parser.add_argument('--optimizer', default='adam', type=str, help='optimizer select') parser.add_argument('--scheduler',", "help='number of dataloader worker') parser.add_argument('--no_cuda', action='store_true', default=False, help='disables CUDA training')", "size: {}'.format(len(dataset_train))) print('validation data size: {}'.format(len(dataset_validation))) dataset_test = load_dataset(opt, train=False)", "!= 0: model = torch.nn.DataParallel(model) model.to(opt.device) train(opt, model, train_dataloader, validation_dataloader)", "type=str, help='choose the data transform type') # training parameter setting", "scheduler setting parser.add_argument('--lr', default=0.03, type=float, help='training learning rate') parser.add_argument('--optimizer', default='adam',", "str(opt.multi_gpu) opt.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') else: print()", "help='optimizer select') parser.add_argument('--scheduler', default='steplr', type=str, help='scheduler select') opt = parser.parse_args()", "GPU SETTINGS ================== # def gpu_setup(opt): use_cuda = not opt.no_cuda", "opt.multi_gpu != 0: print() print('Activating multi-gpu training mode') print(opt.multi_gpu) os.environ['CUDA_VISIBLE_DEVICES']", "opt.num_worker, 'pin_memory': opt.pin_memory} if use_cuda else {} train_dataloader = DataLoader(dataset_train,", "= load_dataset(opt, train=False) print('test data size: {}'.format(len(dataset_test))) print() kwargs =", "setting parser.add_argument('--lr', 
default=0.03, type=float, help='training learning rate') parser.add_argument('--optimizer', default='adam', type=str,", "help='random seed (default: 42)') parser.add_argument('--num_worker', default=1, type=int, help='number of dataloader", "if torch.cuda.is_available() else 'cpu') else: print() print('Activating single-gpu training mode')", "# ======================= MAIN SCRIPT ============================= # def main(opt): use_cuda =", "{}'.format(len(dataset_test))) print() kwargs = {'num_workers': opt.num_worker, 'pin_memory': opt.pin_memory} if use_cuda", "help='GPU-id for GPU to use') parser.add_argument('--multi_gpu', default=0, type=str, help='GPU-ids for", "else 'cpu') print('Using gpu number ' + str(opt.gpu)) return use_cuda", "test-minibatch') # optimizer & scheduler setting parser.add_argument('--lr', default=0.03, type=float, help='training", "print('test data size: {}'.format(len(dataset_test))) print() kwargs = {'num_workers': opt.num_worker, 'pin_memory':", "= not opt.no_cuda and torch.cuda.is_available() os.environ[\"CUDA_DEVICE_ORDER\"] =\"PCI_BUS_ID\" if opt.multi_gpu !=", "training: classification(default), regression') parser.add_argument('--dataset', default='mnist', type=str, help='dataset to use') parser.add_argument('--model',", "& scheduler setting parser.add_argument('--lr', default=0.03, type=float, help='training learning rate') parser.add_argument('--optimizer',", "LIBRARIES ===================== # from customs.train import train, test from customs.dataset", "use_cuda else {} train_dataloader = DataLoader(dataset_train, batch_size=opt.batch_size, shuffle=True, **kwargs) validation_dataloader", "LIBRARIES ================== # import torch from torch.utils.data import DataLoader torch.manual_seed(opt.seed)", "use_cuda # ======================= MAIN SCRIPT ============================= # def main(opt): use_cuda", "type=float, help='split rate for the validation data') parser.add_argument('--transform', default='default', type=str,", 
"model.to(opt.device) train(opt, model, train_dataloader, validation_dataloader) test(opt, model, test_dataloader) if __name__", "print('Activating single-gpu training mode') os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.gpu) opt.device = torch.device('cuda'", "default=10, type=int, help='number of total training iteration') parser.add_argument('--batch_size', default=32, type=int,", "print(opt.multi_gpu) os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.multi_gpu) opt.device = torch.device('cuda' if torch.cuda.is_available() else", "0.1 # ================== IMPORT CUSTOM LEARNING LIBRARIES ===================== # from", "default=0.03, type=float, help='training learning rate') parser.add_argument('--optimizer', default='adam', type=str, help='optimizer select')", "'cpu') else: print() print('Activating single-gpu training mode') os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.gpu)", "default=True, type=bool, help='pin memory option selector') parser.add_argument('--save_model', action='store_true', default=False, help='For", "print('Activating multi-gpu training mode') print(opt.multi_gpu) os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.multi_gpu) opt.device =", "= load_dataset(opt, train=True) print('training data size: {}'.format(len(dataset_train))) print('validation data size:", "opt.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') print('Using gpu number", "from customs.dataset import load_dataset from customs.model import load_model # ==================", "train_dataloader, validation_dataloader) test(opt, model, test_dataloader) if __name__ == '__main__': main(opt)", "# written by <NAME> # version 0.1 # ================== IMPORT", "worker') parser.add_argument('--no_cuda', action='store_true', default=False, help='disables CUDA training') parser.add_argument('--gpu', default=0, type=str,", "help='task of training: classification(default), regression') parser.add_argument('--dataset', default='mnist', type=str, help='dataset to", "default=32, 
type=int, help='size of test-minibatch') # optimizer & scheduler setting", "torch.cuda.is_available() else 'cpu') else: print() print('Activating single-gpu training mode') os.environ['CUDA_VISIBLE_DEVICES']", "training iteration') parser.add_argument('--batch_size', default=32, type=int, help='size of minibatch') parser.add_argument('--test_batch_size', default=32,", "help='split rate for the validation data') parser.add_argument('--transform', default='default', type=str, help='choose", "SETTINGS ================== # import argparse import os parser = argparse.ArgumentParser()", "action='store_true', default=False, help='disables CUDA training') parser.add_argument('--gpu', default=0, type=str, help='GPU-id for", "default=0, type=str, help='GPU-ids for multi-GPU usage') parser.add_argument('--pin_memory', default=True, type=bool, help='pin", "default=os.getcwd()+'/Logs', type=str, help='Where to save Logs') # data setting parser.add_argument('--val_rate',", "default='classification', type=str, help='task of training: classification(default), regression') parser.add_argument('--dataset', default='mnist', type=str,", "parser.add_argument('--transform', default='default', type=str, help='choose the data transform type') # training", "model, train_dataloader, validation_dataloader) test(opt, model, test_dataloader) if __name__ == '__main__':", "type=str, help='Where to save weights') parser.add_argument('--log_path', default=os.getcwd()+'/Logs', type=str, help='Where to", "parser.add_argument('--scheduler', default='steplr', type=str, help='scheduler select') opt = parser.parse_args() # =====================", "os.environ[\"CUDA_DEVICE_ORDER\"] =\"PCI_BUS_ID\" if opt.multi_gpu != 0: print() print('Activating multi-gpu training", "gpu_setup(opt): use_cuda = not opt.no_cuda and torch.cuda.is_available() os.environ[\"CUDA_DEVICE_ORDER\"] =\"PCI_BUS_ID\" if", "train=False) print('test data size: {}'.format(len(dataset_test))) print() kwargs = {'num_workers': 
opt.num_worker,", "= parser.parse_args() # ===================== IMPORT PYTORCH LIBRARIES ================== # import", "= load_model(opt) if opt.multi_gpu != 0: model = torch.nn.DataParallel(model) model.to(opt.device)", "else: print() print('Activating single-gpu training mode') os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.gpu) opt.device", "parser = argparse.ArgumentParser() parser.add_argument('--train_method', default='supervised', type=str, help='type of training: supervised(default),", "test from customs.dataset import load_dataset from customs.model import load_model #", "{'num_workers': opt.num_worker, 'pin_memory': opt.pin_memory} if use_cuda else {} train_dataloader =", "current Model') parser.add_argument('--save_path', default=os.getcwd()+'/weights', type=str, help='Where to save weights') parser.add_argument('--log_path',", "selector') parser.add_argument('--save_model', action='store_true', default=False, help='For Saving the current Model') parser.add_argument('--save_path',", "use_cuda = not opt.no_cuda and torch.cuda.is_available() os.environ[\"CUDA_DEVICE_ORDER\"] =\"PCI_BUS_ID\" if opt.multi_gpu", "DataLoader(dataset_validation, batch_size=opt.batch_size, shuffle=True, **kwargs) test_dataloader = DataLoader(dataset_test, batch_size=opt.test_batch_size, shuffle=True, **kwargs)", "**kwargs) model = load_model(opt) if opt.multi_gpu != 0: model =", "parser.add_argument('--pin_memory', default=True, type=bool, help='pin memory option selector') parser.add_argument('--save_model', action='store_true', default=False,", "the current Model') parser.add_argument('--save_path', default=os.getcwd()+'/weights', type=str, help='Where to save weights')", "load_dataset(opt, train=False) print('test data size: {}'.format(len(dataset_test))) print() kwargs = {'num_workers':", "help='disables CUDA training') parser.add_argument('--gpu', default=0, type=str, help='GPU-id for GPU to", "help='GPU-ids for multi-GPU usage') parser.add_argument('--pin_memory', 
default=True, type=bool, help='pin memory option", "customs.train import train, test from customs.dataset import load_dataset from customs.model", "# data setting parser.add_argument('--val_rate', default=0.2, type=float, help='split rate for the", "IMPORT CUSTOM LEARNING LIBRARIES ===================== # from customs.train import train,", "default=os.getcwd()+'/weights', type=str, help='Where to save weights') parser.add_argument('--log_path', default=os.getcwd()+'/Logs', type=str, help='Where", "os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.multi_gpu) opt.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')", "batch_size=opt.batch_size, shuffle=True, **kwargs) test_dataloader = DataLoader(dataset_test, batch_size=opt.test_batch_size, shuffle=True, **kwargs) model", "parser.add_argument('--num_worker', default=1, type=int, help='number of dataloader worker') parser.add_argument('--no_cuda', action='store_true', default=False,", "data size: {}'.format(len(dataset_test))) print() kwargs = {'num_workers': opt.num_worker, 'pin_memory': opt.pin_memory}", "# from customs.train import train, test from customs.dataset import load_dataset", "PYTORCH LIBRARIES ================== # import torch from torch.utils.data import DataLoader", "torch.cuda.is_available() else 'cpu') print('Using gpu number ' + str(opt.gpu)) return", "CUDA training') parser.add_argument('--gpu', default=0, type=str, help='GPU-id for GPU to use')", "use') parser.add_argument('--multi_gpu', default=0, type=str, help='GPU-ids for multi-GPU usage') parser.add_argument('--pin_memory', default=True,", "version 0.1 # ================== IMPORT CUSTOM LEARNING LIBRARIES ===================== #", "learning rate') parser.add_argument('--optimizer', default='adam', type=str, help='optimizer select') parser.add_argument('--scheduler', default='steplr', type=str,", "setting parser.add_argument('--val_rate', default=0.2, type=float, help='split rate for the validation data')", "by <NAME> # version 
0.1 # ================== IMPORT CUSTOM LEARNING", "validation data') parser.add_argument('--transform', default='default', type=str, help='choose the data transform type')", "usage') parser.add_argument('--pin_memory', default=True, type=bool, help='pin memory option selector') parser.add_argument('--save_model', action='store_true',", "load_dataset from customs.model import load_model # ================== TRAINING SETTINGS ==================", "single-gpu training mode') os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.gpu) opt.device = torch.device('cuda' if", "training mode') os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.gpu) opt.device = torch.device('cuda' if torch.cuda.is_available()", "select') parser.add_argument('--scheduler', default='steplr', type=str, help='scheduler select') opt = parser.parse_args() #", "42)') parser.add_argument('--num_worker', default=1, type=int, help='number of dataloader worker') parser.add_argument('--no_cuda', action='store_true',", "number ' + str(opt.gpu)) return use_cuda # ======================= MAIN SCRIPT", "IMPORT PYTORCH LIBRARIES ================== # import torch from torch.utils.data import", "=\"PCI_BUS_ID\" if opt.multi_gpu != 0: print() print('Activating multi-gpu training mode')", "seed (default: 42)') parser.add_argument('--num_worker', default=1, type=int, help='number of dataloader worker')", "if opt.multi_gpu != 0: model = torch.nn.DataParallel(model) model.to(opt.device) train(opt, model,", "reinforce') parser.add_argument('--task', default='classification', type=str, help='task of training: classification(default), regression') parser.add_argument('--dataset',", "option selector') parser.add_argument('--save_model', action='store_true', default=False, help='For Saving the current Model')", "model = load_model(opt) if opt.multi_gpu != 0: model = torch.nn.DataParallel(model)", "# ================== IMPORT CUSTOM LEARNING LIBRARIES ===================== # from customs.train", "help='size of minibatch') 
parser.add_argument('--test_batch_size', default=32, type=int, help='size of test-minibatch') #", "from torch.utils.data import DataLoader torch.manual_seed(opt.seed) # ================== GPU SETTINGS ==================", "if use_cuda else {} train_dataloader = DataLoader(dataset_train, batch_size=opt.batch_size, shuffle=True, **kwargs)", "(default: 42)') parser.add_argument('--num_worker', default=1, type=int, help='number of dataloader worker') parser.add_argument('--no_cuda',", "+ str(opt.gpu)) return use_cuda # ======================= MAIN SCRIPT ============================= #", "default=0, type=str, help='GPU-id for GPU to use') parser.add_argument('--multi_gpu', default=0, type=str,", "Model') parser.add_argument('--save_path', default=os.getcwd()+'/weights', type=str, help='Where to save weights') parser.add_argument('--log_path', default=os.getcwd()+'/Logs',", "type=int, help='size of minibatch') parser.add_argument('--test_batch_size', default=32, type=int, help='size of test-minibatch')", "parser.add_argument('--no_cuda', action='store_true', default=False, help='disables CUDA training') parser.add_argument('--gpu', default=0, type=str, help='GPU-id", "parser.add_argument('--task', default='classification', type=str, help='task of training: classification(default), regression') parser.add_argument('--dataset', default='mnist',", "============================= # def main(opt): use_cuda = gpu_setup(opt) dataset_train, dataset_validation =", "dataset_validation = load_dataset(opt, train=True) print('training data size: {}'.format(len(dataset_train))) print('validation data", "# import argparse import os parser = argparse.ArgumentParser() parser.add_argument('--train_method', default='supervised',", "gpu_setup(opt) dataset_train, dataset_validation = load_dataset(opt, train=True) print('training data size: {}'.format(len(dataset_train)))", "batch_size=opt.batch_size, shuffle=True, **kwargs) validation_dataloader = DataLoader(dataset_validation, 
batch_size=opt.batch_size, shuffle=True, **kwargs) test_dataloader", "default='supervised', type=str, help='type of training: supervised(default), unsupervised, reinforce') parser.add_argument('--task', default='classification',", "data transform type') # training parameter setting parser.add_argument('--n_epoch', default=10, type=int,", "to save Logs') # data setting parser.add_argument('--val_rate', default=0.2, type=float, help='split", "Logs') # data setting parser.add_argument('--val_rate', default=0.2, type=float, help='split rate for", "opt.pin_memory} if use_cuda else {} train_dataloader = DataLoader(dataset_train, batch_size=opt.batch_size, shuffle=True,", "print('Using gpu number ' + str(opt.gpu)) return use_cuda # =======================", "# ===================== IMPORT PYTORCH LIBRARIES ================== # import torch from", "======================= MAIN SCRIPT ============================= # def main(opt): use_cuda = gpu_setup(opt)", "size: {}'.format(len(dataset_validation))) dataset_test = load_dataset(opt, train=False) print('test data size: {}'.format(len(dataset_test)))", "save weights') parser.add_argument('--log_path', default=os.getcwd()+'/Logs', type=str, help='Where to save Logs') #", "# optimizer & scheduler setting parser.add_argument('--lr', default=0.03, type=float, help='training learning", "the validation data') parser.add_argument('--transform', default='default', type=str, help='choose the data transform", "str(opt.gpu)) return use_cuda # ======================= MAIN SCRIPT ============================= # def", "help='type of training: supervised(default), unsupervised, reinforce') parser.add_argument('--task', default='classification', type=str, help='task", "shuffle=True, **kwargs) model = load_model(opt) if opt.multi_gpu != 0: model", "rate for the validation data') parser.add_argument('--transform', default='default', type=str, help='choose the", "help='scheduler select') opt = parser.parse_args() # ===================== IMPORT 
PYTORCH LIBRARIES", "GPU to use') parser.add_argument('--multi_gpu', default=0, type=str, help='GPU-ids for multi-GPU usage')", "= torch.nn.DataParallel(model) model.to(opt.device) train(opt, model, train_dataloader, validation_dataloader) test(opt, model, test_dataloader)", "= argparse.ArgumentParser() parser.add_argument('--train_method', default='supervised', type=str, help='type of training: supervised(default), unsupervised,", "= DataLoader(dataset_train, batch_size=opt.batch_size, shuffle=True, **kwargs) validation_dataloader = DataLoader(dataset_validation, batch_size=opt.batch_size, shuffle=True,", "mode') print(opt.multi_gpu) os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.multi_gpu) opt.device = torch.device('cuda' if torch.cuda.is_available()", "return use_cuda # ======================= MAIN SCRIPT ============================= # def main(opt):", "training mode') print(opt.multi_gpu) os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.multi_gpu) opt.device = torch.device('cuda' if", "dataloader worker') parser.add_argument('--no_cuda', action='store_true', default=False, help='disables CUDA training') parser.add_argument('--gpu', default=0,", "{} train_dataloader = DataLoader(dataset_train, batch_size=opt.batch_size, shuffle=True, **kwargs) validation_dataloader = DataLoader(dataset_validation,", "opt.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') else: print() print('Activating", "train=True) print('training data size: {}'.format(len(dataset_train))) print('validation data size: {}'.format(len(dataset_validation))) dataset_test", "help='Where to save weights') parser.add_argument('--log_path', default=os.getcwd()+'/Logs', type=str, help='Where to save", "size: {}'.format(len(dataset_test))) print() kwargs = {'num_workers': opt.num_worker, 'pin_memory': opt.pin_memory} if", "= gpu_setup(opt) dataset_train, dataset_validation = load_dataset(opt, train=True) print('training data size:", "multi-GPU usage') parser.add_argument('--pin_memory', 
default=True, type=bool, help='pin memory option selector') parser.add_argument('--save_model',", "torch.manual_seed(opt.seed) # ================== GPU SETTINGS ================== # def gpu_setup(opt): use_cuda", "default=False, help='disables CUDA training') parser.add_argument('--gpu', default=0, type=str, help='GPU-id for GPU", "= str(opt.gpu) opt.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') print('Using", "{}'.format(len(dataset_train))) print('validation data size: {}'.format(len(dataset_validation))) dataset_test = load_dataset(opt, train=False) print('test", "= str(opt.multi_gpu) opt.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') else:", "parser.add_argument('--train_method', default='supervised', type=str, help='type of training: supervised(default), unsupervised, reinforce') parser.add_argument('--task',", "train(opt, model, train_dataloader, validation_dataloader) test(opt, model, test_dataloader) if __name__ ==", "print() print('Activating single-gpu training mode') os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.gpu) opt.device =", "opt.multi_gpu != 0: model = torch.nn.DataParallel(model) model.to(opt.device) train(opt, model, train_dataloader,", "parser.add_argument('--multi_gpu', default=0, type=str, help='GPU-ids for multi-GPU usage') parser.add_argument('--pin_memory', default=True, type=bool,", "if opt.multi_gpu != 0: print() print('Activating multi-gpu training mode') print(opt.multi_gpu)", "===================== IMPORT PYTORCH LIBRARIES ================== # import torch from torch.utils.data", "minibatch') parser.add_argument('--test_batch_size', default=32, type=int, help='size of test-minibatch') # optimizer &", "data size: {}'.format(len(dataset_validation))) dataset_test = load_dataset(opt, train=False) print('test data size:", "help='Where to save Logs') # data setting parser.add_argument('--val_rate', default=0.2, type=float,", "parser.add_argument('--gpu', default=0, type=str, help='GPU-id for 
GPU to use') parser.add_argument('--multi_gpu', default=0,", "data setting parser.add_argument('--val_rate', default=0.2, type=float, help='split rate for the validation", "test_dataloader = DataLoader(dataset_test, batch_size=opt.test_batch_size, shuffle=True, **kwargs) model = load_model(opt) if", "# import torch from torch.utils.data import DataLoader torch.manual_seed(opt.seed) # ==================", "torch.cuda.is_available() os.environ[\"CUDA_DEVICE_ORDER\"] =\"PCI_BUS_ID\" if opt.multi_gpu != 0: print() print('Activating multi-gpu", "parser.add_argument('--save_model', action='store_true', default=False, help='For Saving the current Model') parser.add_argument('--save_path', default=os.getcwd()+'/weights',", "parser.add_argument('--batch_size', default=32, type=int, help='size of minibatch') parser.add_argument('--test_batch_size', default=32, type=int, help='size", "from customs.model import load_model # ================== TRAINING SETTINGS ================== #", "training parameter setting parser.add_argument('--n_epoch', default=10, type=int, help='number of total training", "of dataloader worker') parser.add_argument('--no_cuda', action='store_true', default=False, help='disables CUDA training') parser.add_argument('--gpu',", "parameter setting parser.add_argument('--n_epoch', default=10, type=int, help='number of total training iteration')", "if torch.cuda.is_available() else 'cpu') print('Using gpu number ' + str(opt.gpu))", "type=int, help='number of dataloader worker') parser.add_argument('--no_cuda', action='store_true', default=False, help='disables CUDA", "type=str, help='GPU-ids for multi-GPU usage') parser.add_argument('--pin_memory', default=True, type=bool, help='pin memory", "data size: {}'.format(len(dataset_train))) print('validation data size: {}'.format(len(dataset_validation))) dataset_test = load_dataset(opt,", "type') # training parameter setting parser.add_argument('--n_epoch', default=10, type=int, help='number of", 
"print('validation data size: {}'.format(len(dataset_validation))) dataset_test = load_dataset(opt, train=False) print('test data", "dataset_train, dataset_validation = load_dataset(opt, train=True) print('training data size: {}'.format(len(dataset_train))) print('validation", "= DataLoader(dataset_validation, batch_size=opt.batch_size, shuffle=True, **kwargs) test_dataloader = DataLoader(dataset_test, batch_size=opt.test_batch_size, shuffle=True,", "classification(default), regression') parser.add_argument('--dataset', default='mnist', type=str, help='dataset to use') parser.add_argument('--model', default='CNN',", "import train, test from customs.dataset import load_dataset from customs.model import", "unsupervised, reinforce') parser.add_argument('--task', default='classification', type=str, help='task of training: classification(default), regression')", "torch.utils.data import DataLoader torch.manual_seed(opt.seed) # ================== GPU SETTINGS ================== #", "use') parser.add_argument('--model', default='CNN', type=str, help='model to use') parser.add_argument('--seed', default=42, type=int,", "of training: classification(default), regression') parser.add_argument('--dataset', default='mnist', type=str, help='dataset to use')", "help='dataset to use') parser.add_argument('--model', default='CNN', type=str, help='model to use') parser.add_argument('--seed',", "model = torch.nn.DataParallel(model) model.to(opt.device) train(opt, model, train_dataloader, validation_dataloader) test(opt, model,", "0: print() print('Activating multi-gpu training mode') print(opt.multi_gpu) os.environ['CUDA_VISIBLE_DEVICES'] = str(opt.multi_gpu)", "MAIN SCRIPT ============================= # def main(opt): use_cuda = gpu_setup(opt) dataset_train,", "parser.add_argument('--n_epoch', default=10, type=int, help='number of total training iteration') parser.add_argument('--batch_size', default=32,", "setting parser.add_argument('--n_epoch', default=10, type=int, help='number 
of total training iteration') parser.add_argument('--batch_size',", "of test-minibatch') # optimizer & scheduler setting parser.add_argument('--lr', default=0.03, type=float,", "# ================== GPU SETTINGS ================== # def gpu_setup(opt): use_cuda =", "default=42, type=int, help='random seed (default: 42)') parser.add_argument('--num_worker', default=1, type=int, help='number", "gpu number ' + str(opt.gpu)) return use_cuda # ======================= MAIN", "import argparse import os parser = argparse.ArgumentParser() parser.add_argument('--train_method', default='supervised', type=str,", "train_dataloader = DataLoader(dataset_train, batch_size=opt.batch_size, shuffle=True, **kwargs) validation_dataloader = DataLoader(dataset_validation, batch_size=opt.batch_size,", "print('training data size: {}'.format(len(dataset_train))) print('validation data size: {}'.format(len(dataset_validation))) dataset_test =", "help='pin memory option selector') parser.add_argument('--save_model', action='store_true', default=False, help='For Saving the", "# def gpu_setup(opt): use_cuda = not opt.no_cuda and torch.cuda.is_available() os.environ[\"CUDA_DEVICE_ORDER\"]", "training: supervised(default), unsupervised, reinforce') parser.add_argument('--task', default='classification', type=str, help='task of training:", "type=str, help='type of training: supervised(default), unsupervised, reinforce') parser.add_argument('--task', default='classification', type=str,", "type=str, help='task of training: classification(default), regression') parser.add_argument('--dataset', default='mnist', type=str, help='dataset", "default=0.2, type=float, help='split rate for the validation data') parser.add_argument('--transform', default='default',", "help='number of total training iteration') parser.add_argument('--batch_size', default=32, type=int, help='size of", "argparse.ArgumentParser() parser.add_argument('--train_method', default='supervised', type=str, help='type of training: 
supervised(default), unsupervised, reinforce')", "training') parser.add_argument('--gpu', default=0, type=str, help='GPU-id for GPU to use') parser.add_argument('--multi_gpu',", "================== # def gpu_setup(opt): use_cuda = not opt.no_cuda and torch.cuda.is_available()", "dataset_test = load_dataset(opt, train=False) print('test data size: {}'.format(len(dataset_test))) print() kwargs", "type=str, help='scheduler select') opt = parser.parse_args() # ===================== IMPORT PYTORCH", "parser.add_argument('--save_path', default=os.getcwd()+'/weights', type=str, help='Where to save weights') parser.add_argument('--log_path', default=os.getcwd()+'/Logs', type=str,", "import DataLoader torch.manual_seed(opt.seed) # ================== GPU SETTINGS ================== # def", "regression') parser.add_argument('--dataset', default='mnist', type=str, help='dataset to use') parser.add_argument('--model', default='CNN', type=str,", "for GPU to use') parser.add_argument('--multi_gpu', default=0, type=str, help='GPU-ids for multi-GPU", "CUSTOM LEARNING LIBRARIES ===================== # from customs.train import train, test", "parser.add_argument('--val_rate', default=0.2, type=float, help='split rate for the validation data') parser.add_argument('--transform',", "default=False, help='For Saving the current Model') parser.add_argument('--save_path', default=os.getcwd()+'/weights', type=str, help='Where", "rate') parser.add_argument('--optimizer', default='adam', type=str, help='optimizer select') parser.add_argument('--scheduler', default='steplr', type=str, help='scheduler", "torch.nn.DataParallel(model) model.to(opt.device) train(opt, model, train_dataloader, validation_dataloader) test(opt, model, test_dataloader) if", "parser.parse_args() # ===================== IMPORT PYTORCH LIBRARIES ================== # import torch", "TRAINING SETTINGS ================== # import argparse import os parser =", "import load_dataset from customs.model import load_model # 
================== TRAINING SETTINGS", "parser.add_argument('--optimizer', default='adam', type=str, help='optimizer select') parser.add_argument('--scheduler', default='steplr', type=str, help='scheduler select')", "parser.add_argument('--seed', default=42, type=int, help='random seed (default: 42)') parser.add_argument('--num_worker', default=1, type=int,", "validation_dataloader = DataLoader(dataset_validation, batch_size=opt.batch_size, shuffle=True, **kwargs) test_dataloader = DataLoader(dataset_test, batch_size=opt.test_batch_size,", "action='store_true', default=False, help='For Saving the current Model') parser.add_argument('--save_path', default=os.getcwd()+'/weights', type=str,", "!= 0: print() print('Activating multi-gpu training mode') print(opt.multi_gpu) os.environ['CUDA_VISIBLE_DEVICES'] =", "help='training learning rate') parser.add_argument('--optimizer', default='adam', type=str, help='optimizer select') parser.add_argument('--scheduler', default='steplr',", "type=bool, help='pin memory option selector') parser.add_argument('--save_model', action='store_true', default=False, help='For Saving", "weights') parser.add_argument('--log_path', default=os.getcwd()+'/Logs', type=str, help='Where to save Logs') # data", "print() kwargs = {'num_workers': opt.num_worker, 'pin_memory': opt.pin_memory} if use_cuda else", "opt = parser.parse_args() # ===================== IMPORT PYTORCH LIBRARIES ================== #", "from customs.train import train, test from customs.dataset import load_dataset from", "type=str, help='dataset to use') parser.add_argument('--model', default='CNN', type=str, help='model to use')", "0: model = torch.nn.DataParallel(model) model.to(opt.device) train(opt, model, train_dataloader, validation_dataloader) test(opt," ]
[ "File(\"design.resfile\") inputs.append(f1) rc.add_replica(LOCAL, f1, str(Path(\"design.resfile\").resolve())) f2 = File(\"repack.resfile\") inputs.append(f2) rc.add_replica(LOCAL,", "\".\", \"-database ./minirosetta_database\", \"-linmem_ig 10\", \"-nstruct 1\", \"-pert_num 2\", \"-inner_num", "* logging.basicConfig(level=logging.DEBUG) # --- Work Dir Setup ----------------------------------------------------------- RUN_ID =", "fileServers: - operation: \"get\" url: \"http://workflow.isi.edu/shared-scratch/ptesting\" - operation: \"put\" url:", "= ReplicaCatalog() # add all files in minirosetta_database inputs =", "rc.add_replica(LOCAL, f2, str(Path(\"repack.resfile\").resolve())) wf = Workflow(\"rosetta\") pdb_files = list(Path(\"pdbs\").iterdir()) for", "props[\"pegasus.dir.storage.deep\"] = \"false\" props[\"pegasus.data.configuration\"] = \"nonsharedfs\" with (TOP_DIR / \"pegasus.properties\").open(mode=\"w\")", "- name: \"local\" arch: \"x86_64\" os.type: \"linux\" os.release: \"rhel\" os.version:", "directories: - type: \"sharedScratch\" path: \"{work_dir}/scratch\" fileServers: - operation: \"all\"", "jobs with (TOP_DIR / \"replicas.yml\").open(\"w\") as f: rc.write(f) wf.add_transformation_catalog(tc) try:", "as e: print(\"Unable to find pegasus-config\") assert pegasus_config.returncode == 0", "files in minirosetta_database inputs = list() def get_files(d: Path) ->", "with (TOP_DIR / \"sites.yml\").open(mode=\"w\") as f: f.write(sites) # --- Transformations", "f.write(sites) # --- Transformations ---------------------------------------------------------- rosetta_exe = Transformation( \"rosetta.exe\", arch=Arch.X86_64,", "\"vanilla\" pegasus: style: \"condor\" - name: \"staging_site\" arch: \"x86_64\" os.type:", "import Path from datetime import datetime from Pegasus.api import *", "{}\".format(TOP_DIR / \"pegasus.properties\")) props = Properties() props[\"pegasus.dir.useTimestamp\"] = \"true\" props[\"pegasus.dir.storage.deep\"]", "except 
FileExistsError: pass # --- Configuration ------------------------------------------------------------ print(\"Generating pegasus.properties at:", "if current_file.is_file(): job = ( Job(rosetta_exe, _id=current_file.name.replace(\".pdb\", \"\")) .add_inputs(File(current_file.name), *inputs)", "(TOP_DIR / \"replicas.yml\").open(\"w\") as f: rc.write(f) wf.add_transformation_catalog(tc) try: wf.plan( dir=str(WORK_DIR),", "tc = TransformationCatalog().add_transformations(rosetta_exe) # --- Replicas & Workflow ------------------------------------------------------ rc", "path: \"{work_dir}/outputs\" fileServers: - operation: \"all\" url: \"file://{work_dir}/outputs\" profiles: env:", "pdb_files = list(Path(\"pdbs\").iterdir()) for i in range(10): current_file = pdb_files[i]", "Replicas & Workflow ------------------------------------------------------ rc = ReplicaCatalog() # add all", "subprocess.run( [\"pegasus-config\", \"--bin\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) except FileNotFoundError as e:", "profiles: condor: universe: \"vanilla\" pegasus: style: \"condor\" - name: \"staging_site\"", "\"file://{work_dir}/outputs\" profiles: env: PEGASUS_BIN_DIR: \"{pegasus_bin_dir}\" \"\"\".format( work_dir=str(WORK_DIR), pegasus_bin_dir=PEGASUS_BIN_DIR ) with", "\"\"\".format( work_dir=str(WORK_DIR), pegasus_bin_dir=PEGASUS_BIN_DIR ) with (TOP_DIR / \"sites.yml\").open(mode=\"w\") as f:", "to separate file for registration jobs with (TOP_DIR / \"replicas.yml\").open(\"w\")", "print(\"Unable to find pegasus-config\") assert pegasus_config.returncode == 0 PEGASUS_BIN_DIR =", "-------------------------------------------------------------------- print(\"Generating site catalog at: sites.yml\") LOCAL = \"local\" CONDOR_POOL", "\"nonsharedfs\" with (TOP_DIR / \"pegasus.properties\").open(mode=\"w\") as f: props.write(f) # ---", "2\", \"-inner_num 1\", \"-jd2::ntrials 1\", ) ) rc.add_replica(\"local\", current_file.name, str(current_file.resolve()))", "\"x86_64\" 
os.type: \"linux\" profiles: condor: universe: \"vanilla\" pegasus: style: \"condor\"", "i in range(10): current_file = pdb_files[i] if current_file.is_file(): job =", "rc.add_replica(LOCAL, f1, str(Path(\"design.resfile\").resolve())) f2 = File(\"repack.resfile\") inputs.append(f2) rc.add_replica(LOCAL, f2, str(Path(\"repack.resfile\").resolve()))", "datetime import datetime from Pegasus.api import * logging.basicConfig(level=logging.DEBUG) # ---", "pegasus: style: \"condor\" - name: \"staging_site\" arch: \"x86_64\" os.type: \"linux\"", ") with (TOP_DIR / \"sites.yml\").open(mode=\"w\") as f: f.write(sites) # ---", "+ str(TOP_DIR / \"rosetta.exe\"), is_stageable=True, ).add_pegasus_profile(clusters_size=3) tc = TransformationCatalog().add_transformations(rosetta_exe) #", "\"-in:file:s\", current_file.name, \"-out:prefix \" + current_file.name + \".\", \"-database ./minirosetta_database\",", "= Path.cwd() WORK_DIR = TOP_DIR / \"work\" try: Path.mkdir(WORK_DIR) except", "pegasus.properties at: {}\".format(TOP_DIR / \"pegasus.properties\")) props = Properties() props[\"pegasus.dir.useTimestamp\"] =", "\"{pegasus_bin_dir}\" \"\"\".format( work_dir=str(WORK_DIR), pegasus_bin_dir=PEGASUS_BIN_DIR ) with (TOP_DIR / \"sites.yml\").open(mode=\"w\") as", "f1 = File(\"design.resfile\") inputs.append(f1) rc.add_replica(LOCAL, f1, str(Path(\"design.resfile\").resolve())) f2 = File(\"repack.resfile\")", "= File(\"repack.resfile\") inputs.append(f2) rc.add_replica(LOCAL, f2, str(Path(\"repack.resfile\").resolve())) wf = Workflow(\"rosetta\") pdb_files", "props[\"pegasus.data.configuration\"] = \"nonsharedfs\" with (TOP_DIR / \"pegasus.properties\").open(mode=\"w\") as f: props.write(f)", "f: props.write(f) # --- Sites -------------------------------------------------------------------- print(\"Generating site catalog at:", "in d.iterdir(): if p.is_file(): f = File(str(p)) inputs.append(f) rc.add_replica(LOCAL, str(p),", "\"local\" CONDOR_POOL = \"condorpool\" STAGING_SITE = 
\"staging_site\" try: pegasus_config =", "\"{work_dir}/outputs\" fileServers: - operation: \"all\" url: \"file://{work_dir}/outputs\" profiles: env: PEGASUS_BIN_DIR:", "\"staging_site\" try: pegasus_config = subprocess.run( [\"pegasus-config\", \"--bin\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE )", "\"--bin\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) except FileNotFoundError as e: print(\"Unable to", "datetime.now().strftime(\"%s\") TOP_DIR = Path.cwd() WORK_DIR = TOP_DIR / \"work\" try:", ".add_outputs(File(current_file.name + \".score.sc\"), register_replica=True) .add_args( \"-in:file:s\", current_file.name, \"-out:prefix \" +", "\"rhel\" os.version: \"7\" directories: - type: \"sharedScratch\" path: \"{work_dir}/scratch\" fileServers:", "dir=str(WORK_DIR), verbose=5, sites=[CONDOR_POOL], staging_sites={CONDOR_POOL: STAGING_SITE}, ) except PegasusClientError as e:", "= \"false\" props[\"pegasus.data.configuration\"] = \"nonsharedfs\" with (TOP_DIR / \"pegasus.properties\").open(mode=\"w\") as", "Dir Setup ----------------------------------------------------------- RUN_ID = \"024-sc4-gridftp-http-\" + datetime.now().strftime(\"%s\") TOP_DIR =", "inputs.append(f2) rc.add_replica(LOCAL, f2, str(Path(\"repack.resfile\").resolve())) wf = Workflow(\"rosetta\") pdb_files = list(Path(\"pdbs\").iterdir())", "wf = Workflow(\"rosetta\") pdb_files = list(Path(\"pdbs\").iterdir()) for i in range(10):", "----------------------------------------------------------- RUN_ID = \"024-sc4-gridftp-http-\" + datetime.now().strftime(\"%s\") TOP_DIR = Path.cwd() WORK_DIR", "from Pegasus.api import * logging.basicConfig(level=logging.DEBUG) # --- Work Dir Setup", "condor: universe: \"vanilla\" pegasus: style: \"condor\" - name: \"staging_site\" arch:", "None: for p in d.iterdir(): if p.is_file(): f = File(str(p))", "\"condor_pool\" arch: \"x86_64\" os.type: \"linux\" profiles: condor: universe: \"vanilla\" pegasus:", "sites.yml\") LOCAL = \"local\" CONDOR_POOL = 
\"condorpool\" STAGING_SITE = \"staging_site\"", "- operation: \"all\" url: \"file://{work_dir}/outputs\" profiles: env: PEGASUS_BIN_DIR: \"{pegasus_bin_dir}\" \"\"\".format(", "\"linux\" profiles: condor: universe: \"vanilla\" pegasus: style: \"condor\" - name:", "\"get\" url: \"http://workflow.isi.edu/shared-scratch/ptesting\" - operation: \"put\" url: \"gsiftp://workflow.isi.edu/lizard/scratch-90-days/http-scratch/ptesting\" - name:", "current_file.name + \".\", \"-database ./minirosetta_database\", \"-linmem_ig 10\", \"-nstruct 1\", \"-pert_num", "\"-out:prefix \" + current_file.name + \".\", \"-database ./minirosetta_database\", \"-linmem_ig 10\",", "Workflow(\"rosetta\") pdb_files = list(Path(\"pdbs\").iterdir()) for i in range(10): current_file =", "stderr=subprocess.PIPE ) except FileNotFoundError as e: print(\"Unable to find pegasus-config\")", "path: \"{work_dir}/scratch\" fileServers: - operation: \"all\" url: \"file://{work_dir}/scratch\" - type:", "as f: f.write(sites) # --- Transformations ---------------------------------------------------------- rosetta_exe = Transformation(", "name: \"staging_site\" arch: \"x86_64\" os.type: \"linux\" directories: - type: \"sharedScratch\"", "/ \"pegasus.properties\")) props = Properties() props[\"pegasus.dir.useTimestamp\"] = \"true\" props[\"pegasus.dir.storage.deep\"] =", "= File(str(p)) inputs.append(f) rc.add_replica(LOCAL, str(p), str(p.resolve())) else: get_files(p) get_files(Path(\"minirosetta_database\")) f1", "= pdb_files[i] if current_file.is_file(): job = ( Job(rosetta_exe, _id=current_file.name.replace(\".pdb\", \"\"))", "\"linux\" os.release: \"rhel\" os.version: \"7\" directories: - type: \"sharedScratch\" path:", "job = ( Job(rosetta_exe, _id=current_file.name.replace(\".pdb\", \"\")) .add_inputs(File(current_file.name), *inputs) .add_outputs(File(current_file.name +", "at: sites.yml\") LOCAL = \"local\" CONDOR_POOL = \"condorpool\" STAGING_SITE =", "url: 
\"http://workflow.isi.edu/shared-scratch/ptesting\" - operation: \"put\" url: \"gsiftp://workflow.isi.edu/lizard/scratch-90-days/http-scratch/ptesting\" - name: \"local\"", "\"024-sc4-gridftp-http-\" + datetime.now().strftime(\"%s\") TOP_DIR = Path.cwd() WORK_DIR = TOP_DIR /", "import subprocess from pathlib import Path from datetime import datetime", "Workflow ------------------------------------------------------ rc = ReplicaCatalog() # add all files in", "python3 import logging import sys import subprocess from pathlib import", "- name: \"condor_pool\" arch: \"x86_64\" os.type: \"linux\" profiles: condor: universe:", "= pegasus_config.stdout.decode().strip() sites = \"\"\" pegasus: \"5.0\" sites: - name:", "name: \"local\" arch: \"x86_64\" os.type: \"linux\" os.release: \"rhel\" os.version: \"7\"", "\"sharedScratch\" path: \"/lizard/scratch-90-days/http-scratch/ptesting\" fileServers: - operation: \"get\" url: \"http://workflow.isi.edu/shared-scratch/ptesting\" -", "STAGING_SITE = \"staging_site\" try: pegasus_config = subprocess.run( [\"pegasus-config\", \"--bin\"], stdout=subprocess.PIPE,", "arch: \"x86_64\" os.type: \"linux\" profiles: condor: universe: \"vanilla\" pegasus: style:", "- name: \"staging_site\" arch: \"x86_64\" os.type: \"linux\" directories: - type:", "<reponame>ahnitz/pegasus #!/usr/bin/env python3 import logging import sys import subprocess from", "minirosetta_database inputs = list() def get_files(d: Path) -> None: for", "str(p.resolve())) else: get_files(p) get_files(Path(\"minirosetta_database\")) f1 = File(\"design.resfile\") inputs.append(f1) rc.add_replica(LOCAL, f1,", "+ current_file.name + \".\", \"-database ./minirosetta_database\", \"-linmem_ig 10\", \"-nstruct 1\",", "pathlib import Path from datetime import datetime from Pegasus.api import", "\"-jd2::ntrials 1\", ) ) rc.add_replica(\"local\", current_file.name, str(current_file.resolve())) wf.add_jobs(job) # write", "\"-database ./minirosetta_database\", \"-linmem_ig 10\", 
\"-nstruct 1\", \"-pert_num 2\", \"-inner_num 1\",", "style: \"condor\" - name: \"staging_site\" arch: \"x86_64\" os.type: \"linux\" directories:", "[\"pegasus-config\", \"--bin\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) except FileNotFoundError as e: print(\"Unable", "with (TOP_DIR / \"replicas.yml\").open(\"w\") as f: rc.write(f) wf.add_transformation_catalog(tc) try: wf.plan(", "url: \"gsiftp://workflow.isi.edu/lizard/scratch-90-days/http-scratch/ptesting\" - name: \"local\" arch: \"x86_64\" os.type: \"linux\" os.release:", "_id=current_file.name.replace(\".pdb\", \"\")) .add_inputs(File(current_file.name), *inputs) .add_outputs(File(current_file.name + \".score.sc\"), register_replica=True) .add_args( \"-in:file:s\",", "1\", ) ) rc.add_replica(\"local\", current_file.name, str(current_file.resolve())) wf.add_jobs(job) # write rc", "add all files in minirosetta_database inputs = list() def get_files(d:", "\"condorpool\" STAGING_SITE = \"staging_site\" try: pegasus_config = subprocess.run( [\"pegasus-config\", \"--bin\"],", "props[\"pegasus.dir.useTimestamp\"] = \"true\" props[\"pegasus.dir.storage.deep\"] = \"false\" props[\"pegasus.data.configuration\"] = \"nonsharedfs\" with", "= subprocess.run( [\"pegasus-config\", \"--bin\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) except FileNotFoundError as", "Path from datetime import datetime from Pegasus.api import * logging.basicConfig(level=logging.DEBUG)", "sites: - name: \"condor_pool\" arch: \"x86_64\" os.type: \"linux\" profiles: condor:", "+ \".score.sc\"), register_replica=True) .add_args( \"-in:file:s\", current_file.name, \"-out:prefix \" + current_file.name", "operation: \"all\" url: \"file://{work_dir}/outputs\" profiles: env: PEGASUS_BIN_DIR: \"{pegasus_bin_dir}\" \"\"\".format( work_dir=str(WORK_DIR),", "WORK_DIR = TOP_DIR / \"work\" try: Path.mkdir(WORK_DIR) except FileExistsError: pass", "to find pegasus-config\") assert pegasus_config.returncode == 0 PEGASUS_BIN_DIR = 
pegasus_config.stdout.decode().strip()", "\"/lizard/scratch-90-days/http-scratch/ptesting\" fileServers: - operation: \"get\" url: \"http://workflow.isi.edu/shared-scratch/ptesting\" - operation: \"put\"", "\"put\" url: \"gsiftp://workflow.isi.edu/lizard/scratch-90-days/http-scratch/ptesting\" - name: \"local\" arch: \"x86_64\" os.type: \"linux\"", "rc.write(f) wf.add_transformation_catalog(tc) try: wf.plan( dir=str(WORK_DIR), verbose=5, sites=[CONDOR_POOL], staging_sites={CONDOR_POOL: STAGING_SITE}, )", "= Workflow(\"rosetta\") pdb_files = list(Path(\"pdbs\").iterdir()) for i in range(10): current_file", "work_dir=str(WORK_DIR), pegasus_bin_dir=PEGASUS_BIN_DIR ) with (TOP_DIR / \"sites.yml\").open(mode=\"w\") as f: f.write(sites)", "pdb_files[i] if current_file.is_file(): job = ( Job(rosetta_exe, _id=current_file.name.replace(\".pdb\", \"\")) .add_inputs(File(current_file.name),", "Transformation( \"rosetta.exe\", arch=Arch.X86_64, os_type=OS.LINUX, site=\"local\", pfn=\"file://\" + str(TOP_DIR / \"rosetta.exe\"),", "os.release: \"rhel\" os.version: \"7\" directories: - type: \"sharedScratch\" path: \"{work_dir}/scratch\"", "\"-inner_num 1\", \"-jd2::ntrials 1\", ) ) rc.add_replica(\"local\", current_file.name, str(current_file.resolve())) wf.add_jobs(job)", ") except FileNotFoundError as e: print(\"Unable to find pegasus-config\") assert", "\"5.0\" sites: - name: \"condor_pool\" arch: \"x86_64\" os.type: \"linux\" profiles:", "\"{work_dir}/scratch\" fileServers: - operation: \"all\" url: \"file://{work_dir}/scratch\" - type: \"localStorage\"", "if p.is_file(): f = File(str(p)) inputs.append(f) rc.add_replica(LOCAL, str(p), str(p.resolve())) else:", "= \"local\" CONDOR_POOL = \"condorpool\" STAGING_SITE = \"staging_site\" try: pegasus_config", "\"localStorage\" path: \"{work_dir}/outputs\" fileServers: - operation: \"all\" url: \"file://{work_dir}/outputs\" profiles:", "profiles: env: PEGASUS_BIN_DIR: \"{pegasus_bin_dir}\" \"\"\".format( work_dir=str(WORK_DIR), 
pegasus_bin_dir=PEGASUS_BIN_DIR ) with (TOP_DIR", "== 0 PEGASUS_BIN_DIR = pegasus_config.stdout.decode().strip() sites = \"\"\" pegasus: \"5.0\"", "str(current_file.resolve())) wf.add_jobs(job) # write rc to separate file for registration", "--- Configuration ------------------------------------------------------------ print(\"Generating pegasus.properties at: {}\".format(TOP_DIR / \"pegasus.properties\")) props", "+ \".\", \"-database ./minirosetta_database\", \"-linmem_ig 10\", \"-nstruct 1\", \"-pert_num 2\",", "fileServers: - operation: \"all\" url: \"file://{work_dir}/scratch\" - type: \"localStorage\" path:", "# --- Replicas & Workflow ------------------------------------------------------ rc = ReplicaCatalog() #", "/ \"sites.yml\").open(mode=\"w\") as f: f.write(sites) # --- Transformations ---------------------------------------------------------- rosetta_exe", "Path.mkdir(WORK_DIR) except FileExistsError: pass # --- Configuration ------------------------------------------------------------ print(\"Generating pegasus.properties", "- type: \"localStorage\" path: \"{work_dir}/outputs\" fileServers: - operation: \"all\" url:", "Sites -------------------------------------------------------------------- print(\"Generating site catalog at: sites.yml\") LOCAL = \"local\"", "\" + current_file.name + \".\", \"-database ./minirosetta_database\", \"-linmem_ig 10\", \"-nstruct", "str(TOP_DIR / \"rosetta.exe\"), is_stageable=True, ).add_pegasus_profile(clusters_size=3) tc = TransformationCatalog().add_transformations(rosetta_exe) # ---", "inputs = list() def get_files(d: Path) -> None: for p", "site=\"local\", pfn=\"file://\" + str(TOP_DIR / \"rosetta.exe\"), is_stageable=True, ).add_pegasus_profile(clusters_size=3) tc =", "1\", \"-jd2::ntrials 1\", ) ) rc.add_replica(\"local\", current_file.name, str(current_file.resolve())) wf.add_jobs(job) #", "logging import sys import subprocess from pathlib import Path from", 
"TransformationCatalog().add_transformations(rosetta_exe) # --- Replicas & Workflow ------------------------------------------------------ rc = ReplicaCatalog()", "path: \"/lizard/scratch-90-days/http-scratch/ptesting\" fileServers: - operation: \"get\" url: \"http://workflow.isi.edu/shared-scratch/ptesting\" - operation:", "# write rc to separate file for registration jobs with", "try: wf.plan( dir=str(WORK_DIR), verbose=5, sites=[CONDOR_POOL], staging_sites={CONDOR_POOL: STAGING_SITE}, ) except PegasusClientError", "\"all\" url: \"file://{work_dir}/scratch\" - type: \"localStorage\" path: \"{work_dir}/outputs\" fileServers: -", "for registration jobs with (TOP_DIR / \"replicas.yml\").open(\"w\") as f: rc.write(f)", "FileExistsError: pass # --- Configuration ------------------------------------------------------------ print(\"Generating pegasus.properties at: {}\".format(TOP_DIR", "- type: \"sharedScratch\" path: \"{work_dir}/scratch\" fileServers: - operation: \"all\" url:", "assert pegasus_config.returncode == 0 PEGASUS_BIN_DIR = pegasus_config.stdout.decode().strip() sites = \"\"\"", "arch: \"x86_64\" os.type: \"linux\" os.release: \"rhel\" os.version: \"7\" directories: -", "& Workflow ------------------------------------------------------ rc = ReplicaCatalog() # add all files", "= \"024-sc4-gridftp-http-\" + datetime.now().strftime(\"%s\") TOP_DIR = Path.cwd() WORK_DIR = TOP_DIR", "os.version: \"7\" directories: - type: \"sharedScratch\" path: \"{work_dir}/scratch\" fileServers: -", "\"work\" try: Path.mkdir(WORK_DIR) except FileExistsError: pass # --- Configuration ------------------------------------------------------------", "operation: \"put\" url: \"gsiftp://workflow.isi.edu/lizard/scratch-90-days/http-scratch/ptesting\" - name: \"local\" arch: \"x86_64\" os.type:", "\"pegasus.properties\")) props = Properties() props[\"pegasus.dir.useTimestamp\"] = \"true\" props[\"pegasus.dir.storage.deep\"] = \"false\"", 
"\"http://workflow.isi.edu/shared-scratch/ptesting\" - operation: \"put\" url: \"gsiftp://workflow.isi.edu/lizard/scratch-90-days/http-scratch/ptesting\" - name: \"local\" arch:", "ReplicaCatalog() # add all files in minirosetta_database inputs = list()", "props = Properties() props[\"pegasus.dir.useTimestamp\"] = \"true\" props[\"pegasus.dir.storage.deep\"] = \"false\" props[\"pegasus.data.configuration\"]", "registration jobs with (TOP_DIR / \"replicas.yml\").open(\"w\") as f: rc.write(f) wf.add_transformation_catalog(tc)", "operation: \"all\" url: \"file://{work_dir}/scratch\" - type: \"localStorage\" path: \"{work_dir}/outputs\" fileServers:", "f: rc.write(f) wf.add_transformation_catalog(tc) try: wf.plan( dir=str(WORK_DIR), verbose=5, sites=[CONDOR_POOL], staging_sites={CONDOR_POOL: STAGING_SITE},", "import * logging.basicConfig(level=logging.DEBUG) # --- Work Dir Setup ----------------------------------------------------------- RUN_ID", "p in d.iterdir(): if p.is_file(): f = File(str(p)) inputs.append(f) rc.add_replica(LOCAL,", "current_file.is_file(): job = ( Job(rosetta_exe, _id=current_file.name.replace(\".pdb\", \"\")) .add_inputs(File(current_file.name), *inputs) .add_outputs(File(current_file.name", "\"x86_64\" os.type: \"linux\" os.release: \"rhel\" os.version: \"7\" directories: - type:", "\"all\" url: \"file://{work_dir}/outputs\" profiles: env: PEGASUS_BIN_DIR: \"{pegasus_bin_dir}\" \"\"\".format( work_dir=str(WORK_DIR), pegasus_bin_dir=PEGASUS_BIN_DIR", "rosetta_exe = Transformation( \"rosetta.exe\", arch=Arch.X86_64, os_type=OS.LINUX, site=\"local\", pfn=\"file://\" + str(TOP_DIR", "directories: - type: \"sharedScratch\" path: \"/lizard/scratch-90-days/http-scratch/ptesting\" fileServers: - operation: \"get\"", "\"-nstruct 1\", \"-pert_num 2\", \"-inner_num 1\", \"-jd2::ntrials 1\", ) )", "= list() def get_files(d: Path) -> None: for p in", "sites = \"\"\" pegasus: \"5.0\" sites: - name: \"condor_pool\" arch:", "url: \"file://{work_dir}/scratch\" 
- type: \"localStorage\" path: \"{work_dir}/outputs\" fileServers: - operation:", "in minirosetta_database inputs = list() def get_files(d: Path) -> None:", "range(10): current_file = pdb_files[i] if current_file.is_file(): job = ( Job(rosetta_exe,", "inputs.append(f1) rc.add_replica(LOCAL, f1, str(Path(\"design.resfile\").resolve())) f2 = File(\"repack.resfile\") inputs.append(f2) rc.add_replica(LOCAL, f2,", "sys import subprocess from pathlib import Path from datetime import", "in range(10): current_file = pdb_files[i] if current_file.is_file(): job = (", "type: \"sharedScratch\" path: \"{work_dir}/scratch\" fileServers: - operation: \"all\" url: \"file://{work_dir}/scratch\"", "\"replicas.yml\").open(\"w\") as f: rc.write(f) wf.add_transformation_catalog(tc) try: wf.plan( dir=str(WORK_DIR), verbose=5, sites=[CONDOR_POOL],", "f2 = File(\"repack.resfile\") inputs.append(f2) rc.add_replica(LOCAL, f2, str(Path(\"repack.resfile\").resolve())) wf = Workflow(\"rosetta\")", "\"file://{work_dir}/scratch\" - type: \"localStorage\" path: \"{work_dir}/outputs\" fileServers: - operation: \"all\"", "Transformations ---------------------------------------------------------- rosetta_exe = Transformation( \"rosetta.exe\", arch=Arch.X86_64, os_type=OS.LINUX, site=\"local\", pfn=\"file://\"", "find pegasus-config\") assert pegasus_config.returncode == 0 PEGASUS_BIN_DIR = pegasus_config.stdout.decode().strip() sites", "--- Transformations ---------------------------------------------------------- rosetta_exe = Transformation( \"rosetta.exe\", arch=Arch.X86_64, os_type=OS.LINUX, site=\"local\",", "wf.plan( dir=str(WORK_DIR), verbose=5, sites=[CONDOR_POOL], staging_sites={CONDOR_POOL: STAGING_SITE}, ) except PegasusClientError as", "= \"condorpool\" STAGING_SITE = \"staging_site\" try: pegasus_config = subprocess.run( [\"pegasus-config\",", "1\", \"-pert_num 2\", \"-inner_num 1\", \"-jd2::ntrials 1\", ) ) rc.add_replica(\"local\",", "# --- Work Dir Setup 
----------------------------------------------------------- RUN_ID = \"024-sc4-gridftp-http-\" +", "\"condor\" - name: \"staging_site\" arch: \"x86_64\" os.type: \"linux\" directories: -", "/ \"replicas.yml\").open(\"w\") as f: rc.write(f) wf.add_transformation_catalog(tc) try: wf.plan( dir=str(WORK_DIR), verbose=5,", "- operation: \"put\" url: \"gsiftp://workflow.isi.edu/lizard/scratch-90-days/http-scratch/ptesting\" - name: \"local\" arch: \"x86_64\"", "at: {}\".format(TOP_DIR / \"pegasus.properties\")) props = Properties() props[\"pegasus.dir.useTimestamp\"] = \"true\"", "os_type=OS.LINUX, site=\"local\", pfn=\"file://\" + str(TOP_DIR / \"rosetta.exe\"), is_stageable=True, ).add_pegasus_profile(clusters_size=3) tc", "fileServers: - operation: \"all\" url: \"file://{work_dir}/outputs\" profiles: env: PEGASUS_BIN_DIR: \"{pegasus_bin_dir}\"", "from pathlib import Path from datetime import datetime from Pegasus.api", "str(p), str(p.resolve())) else: get_files(p) get_files(Path(\"minirosetta_database\")) f1 = File(\"design.resfile\") inputs.append(f1) rc.add_replica(LOCAL,", "inputs.append(f) rc.add_replica(LOCAL, str(p), str(p.resolve())) else: get_files(p) get_files(Path(\"minirosetta_database\")) f1 = File(\"design.resfile\")", "\"gsiftp://workflow.isi.edu/lizard/scratch-90-days/http-scratch/ptesting\" - name: \"local\" arch: \"x86_64\" os.type: \"linux\" os.release: \"rhel\"", "\"rosetta.exe\", arch=Arch.X86_64, os_type=OS.LINUX, site=\"local\", pfn=\"file://\" + str(TOP_DIR / \"rosetta.exe\"), is_stageable=True,", "import sys import subprocess from pathlib import Path from datetime", "current_file.name, \"-out:prefix \" + current_file.name + \".\", \"-database ./minirosetta_database\", \"-linmem_ig", "pegasus_config.returncode == 0 PEGASUS_BIN_DIR = pegasus_config.stdout.decode().strip() sites = \"\"\" pegasus:", "write rc to separate file for registration jobs with (TOP_DIR", "LOCAL = \"local\" CONDOR_POOL = \"condorpool\" STAGING_SITE = \"staging_site\" try:", 
"is_stageable=True, ).add_pegasus_profile(clusters_size=3) tc = TransformationCatalog().add_transformations(rosetta_exe) # --- Replicas & Workflow", "= TOP_DIR / \"work\" try: Path.mkdir(WORK_DIR) except FileExistsError: pass #", "wf.add_jobs(job) # write rc to separate file for registration jobs", "logging.basicConfig(level=logging.DEBUG) # --- Work Dir Setup ----------------------------------------------------------- RUN_ID = \"024-sc4-gridftp-http-\"", "CONDOR_POOL = \"condorpool\" STAGING_SITE = \"staging_site\" try: pegasus_config = subprocess.run(", "env: PEGASUS_BIN_DIR: \"{pegasus_bin_dir}\" \"\"\".format( work_dir=str(WORK_DIR), pegasus_bin_dir=PEGASUS_BIN_DIR ) with (TOP_DIR /", "props.write(f) # --- Sites -------------------------------------------------------------------- print(\"Generating site catalog at: sites.yml\")", "FileNotFoundError as e: print(\"Unable to find pegasus-config\") assert pegasus_config.returncode ==", "current_file = pdb_files[i] if current_file.is_file(): job = ( Job(rosetta_exe, _id=current_file.name.replace(\".pdb\",", "subprocess from pathlib import Path from datetime import datetime from", "\"false\" props[\"pegasus.data.configuration\"] = \"nonsharedfs\" with (TOP_DIR / \"pegasus.properties\").open(mode=\"w\") as f:", "= \"nonsharedfs\" with (TOP_DIR / \"pegasus.properties\").open(mode=\"w\") as f: props.write(f) #", "File(str(p)) inputs.append(f) rc.add_replica(LOCAL, str(p), str(p.resolve())) else: get_files(p) get_files(Path(\"minirosetta_database\")) f1 =", "type: \"sharedScratch\" path: \"/lizard/scratch-90-days/http-scratch/ptesting\" fileServers: - operation: \"get\" url: \"http://workflow.isi.edu/shared-scratch/ptesting\"", "\"local\" arch: \"x86_64\" os.type: \"linux\" os.release: \"rhel\" os.version: \"7\" directories:", "Path) -> None: for p in d.iterdir(): if p.is_file(): f", "os.type: \"linux\" os.release: \"rhel\" os.version: \"7\" directories: - type: \"sharedScratch\"", "rc = ReplicaCatalog() # add all files 
in minirosetta_database inputs", "import datetime from Pegasus.api import * logging.basicConfig(level=logging.DEBUG) # --- Work", "pegasus_bin_dir=PEGASUS_BIN_DIR ) with (TOP_DIR / \"sites.yml\").open(mode=\"w\") as f: f.write(sites) #", "for i in range(10): current_file = pdb_files[i] if current_file.is_file(): job", "def get_files(d: Path) -> None: for p in d.iterdir(): if", "pegasus_config = subprocess.run( [\"pegasus-config\", \"--bin\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) except FileNotFoundError", "pegasus: \"5.0\" sites: - name: \"condor_pool\" arch: \"x86_64\" os.type: \"linux\"", "Job(rosetta_exe, _id=current_file.name.replace(\".pdb\", \"\")) .add_inputs(File(current_file.name), *inputs) .add_outputs(File(current_file.name + \".score.sc\"), register_replica=True) .add_args(", "Work Dir Setup ----------------------------------------------------------- RUN_ID = \"024-sc4-gridftp-http-\" + datetime.now().strftime(\"%s\") TOP_DIR", "\"staging_site\" arch: \"x86_64\" os.type: \"linux\" directories: - type: \"sharedScratch\" path:", "\"sites.yml\").open(mode=\"w\") as f: f.write(sites) # --- Transformations ---------------------------------------------------------- rosetta_exe =", "list() def get_files(d: Path) -> None: for p in d.iterdir():", "\"pegasus.properties\").open(mode=\"w\") as f: props.write(f) # --- Sites -------------------------------------------------------------------- print(\"Generating site", "wf.add_transformation_catalog(tc) try: wf.plan( dir=str(WORK_DIR), verbose=5, sites=[CONDOR_POOL], staging_sites={CONDOR_POOL: STAGING_SITE}, ) except", "arch: \"x86_64\" os.type: \"linux\" directories: - type: \"sharedScratch\" path: \"/lizard/scratch-90-days/http-scratch/ptesting\"", "( Job(rosetta_exe, _id=current_file.name.replace(\".pdb\", \"\")) .add_inputs(File(current_file.name), *inputs) .add_outputs(File(current_file.name + \".score.sc\"), register_replica=True)", "/ \"rosetta.exe\"), is_stageable=True, 
).add_pegasus_profile(clusters_size=3) tc = TransformationCatalog().add_transformations(rosetta_exe) # --- Replicas", "f = File(str(p)) inputs.append(f) rc.add_replica(LOCAL, str(p), str(p.resolve())) else: get_files(p) get_files(Path(\"minirosetta_database\"))", "Properties() props[\"pegasus.dir.useTimestamp\"] = \"true\" props[\"pegasus.dir.storage.deep\"] = \"false\" props[\"pegasus.data.configuration\"] = \"nonsharedfs\"", "./minirosetta_database\", \"-linmem_ig 10\", \"-nstruct 1\", \"-pert_num 2\", \"-inner_num 1\", \"-jd2::ntrials", "# --- Configuration ------------------------------------------------------------ print(\"Generating pegasus.properties at: {}\".format(TOP_DIR / \"pegasus.properties\"))", "try: Path.mkdir(WORK_DIR) except FileExistsError: pass # --- Configuration ------------------------------------------------------------ print(\"Generating", "datetime from Pegasus.api import * logging.basicConfig(level=logging.DEBUG) # --- Work Dir", "#!/usr/bin/env python3 import logging import sys import subprocess from pathlib", "all files in minirosetta_database inputs = list() def get_files(d: Path)", "current_file.name, str(current_file.resolve())) wf.add_jobs(job) # write rc to separate file for", "try: pegasus_config = subprocess.run( [\"pegasus-config\", \"--bin\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) except", "= Properties() props[\"pegasus.dir.useTimestamp\"] = \"true\" props[\"pegasus.dir.storage.deep\"] = \"false\" props[\"pegasus.data.configuration\"] =", "list(Path(\"pdbs\").iterdir()) for i in range(10): current_file = pdb_files[i] if current_file.is_file():", "rc.add_replica(LOCAL, str(p), str(p.resolve())) else: get_files(p) get_files(Path(\"minirosetta_database\")) f1 = File(\"design.resfile\") inputs.append(f1)", "= \"\"\" pegasus: \"5.0\" sites: - name: \"condor_pool\" arch: \"x86_64\"", "pegasus_config.stdout.decode().strip() sites = \"\"\" pegasus: \"5.0\" sites: - name: \"condor_pool\"", 
"str(Path(\"repack.resfile\").resolve())) wf = Workflow(\"rosetta\") pdb_files = list(Path(\"pdbs\").iterdir()) for i in", "10\", \"-nstruct 1\", \"-pert_num 2\", \"-inner_num 1\", \"-jd2::ntrials 1\", )", "/ \"pegasus.properties\").open(mode=\"w\") as f: props.write(f) # --- Sites -------------------------------------------------------------------- print(\"Generating", "Configuration ------------------------------------------------------------ print(\"Generating pegasus.properties at: {}\".format(TOP_DIR / \"pegasus.properties\")) props =", "os.type: \"linux\" directories: - type: \"sharedScratch\" path: \"/lizard/scratch-90-days/http-scratch/ptesting\" fileServers: -", "*inputs) .add_outputs(File(current_file.name + \".score.sc\"), register_replica=True) .add_args( \"-in:file:s\", current_file.name, \"-out:prefix \"", "= ( Job(rosetta_exe, _id=current_file.name.replace(\".pdb\", \"\")) .add_inputs(File(current_file.name), *inputs) .add_outputs(File(current_file.name + \".score.sc\"),", "# --- Transformations ---------------------------------------------------------- rosetta_exe = Transformation( \"rosetta.exe\", arch=Arch.X86_64, os_type=OS.LINUX,", ") ) rc.add_replica(\"local\", current_file.name, str(current_file.resolve())) wf.add_jobs(job) # write rc to", "\"linux\" directories: - type: \"sharedScratch\" path: \"/lizard/scratch-90-days/http-scratch/ptesting\" fileServers: - operation:", "+ datetime.now().strftime(\"%s\") TOP_DIR = Path.cwd() WORK_DIR = TOP_DIR / \"work\"", ".add_inputs(File(current_file.name), *inputs) .add_outputs(File(current_file.name + \".score.sc\"), register_replica=True) .add_args( \"-in:file:s\", current_file.name, \"-out:prefix", "= list(Path(\"pdbs\").iterdir()) for i in range(10): current_file = pdb_files[i] if", "- operation: \"get\" url: \"http://workflow.isi.edu/shared-scratch/ptesting\" - operation: \"put\" url: \"gsiftp://workflow.isi.edu/lizard/scratch-90-days/http-scratch/ptesting\"", "- type: \"sharedScratch\" path: 
\"/lizard/scratch-90-days/http-scratch/ptesting\" fileServers: - operation: \"get\" url:", "f1, str(Path(\"design.resfile\").resolve())) f2 = File(\"repack.resfile\") inputs.append(f2) rc.add_replica(LOCAL, f2, str(Path(\"repack.resfile\").resolve())) wf", "str(Path(\"design.resfile\").resolve())) f2 = File(\"repack.resfile\") inputs.append(f2) rc.add_replica(LOCAL, f2, str(Path(\"repack.resfile\").resolve())) wf =", "url: \"file://{work_dir}/outputs\" profiles: env: PEGASUS_BIN_DIR: \"{pegasus_bin_dir}\" \"\"\".format( work_dir=str(WORK_DIR), pegasus_bin_dir=PEGASUS_BIN_DIR )", "\".score.sc\"), register_replica=True) .add_args( \"-in:file:s\", current_file.name, \"-out:prefix \" + current_file.name +", "File(\"repack.resfile\") inputs.append(f2) rc.add_replica(LOCAL, f2, str(Path(\"repack.resfile\").resolve())) wf = Workflow(\"rosetta\") pdb_files =", "with (TOP_DIR / \"pegasus.properties\").open(mode=\"w\") as f: props.write(f) # --- Sites", ") rc.add_replica(\"local\", current_file.name, str(current_file.resolve())) wf.add_jobs(job) # write rc to separate", "get_files(d: Path) -> None: for p in d.iterdir(): if p.is_file():", "site catalog at: sites.yml\") LOCAL = \"local\" CONDOR_POOL = \"condorpool\"", "TOP_DIR / \"work\" try: Path.mkdir(WORK_DIR) except FileExistsError: pass # ---", ").add_pegasus_profile(clusters_size=3) tc = TransformationCatalog().add_transformations(rosetta_exe) # --- Replicas & Workflow ------------------------------------------------------", "else: get_files(p) get_files(Path(\"minirosetta_database\")) f1 = File(\"design.resfile\") inputs.append(f1) rc.add_replica(LOCAL, f1, str(Path(\"design.resfile\").resolve()))", "= \"staging_site\" try: pegasus_config = subprocess.run( [\"pegasus-config\", \"--bin\"], stdout=subprocess.PIPE, stderr=subprocess.PIPE", "\"true\" props[\"pegasus.dir.storage.deep\"] = \"false\" props[\"pegasus.data.configuration\"] = \"nonsharedfs\" with (TOP_DIR /", "d.iterdir(): if p.is_file(): f = File(str(p)) 
inputs.append(f) rc.add_replica(LOCAL, str(p), str(p.resolve()))", "arch=Arch.X86_64, os_type=OS.LINUX, site=\"local\", pfn=\"file://\" + str(TOP_DIR / \"rosetta.exe\"), is_stageable=True, ).add_pegasus_profile(clusters_size=3)", "--- Sites -------------------------------------------------------------------- print(\"Generating site catalog at: sites.yml\") LOCAL =", "e: print(\"Unable to find pegasus-config\") assert pegasus_config.returncode == 0 PEGASUS_BIN_DIR", "\"sharedScratch\" path: \"{work_dir}/scratch\" fileServers: - operation: \"all\" url: \"file://{work_dir}/scratch\" -", "--- Work Dir Setup ----------------------------------------------------------- RUN_ID = \"024-sc4-gridftp-http-\" + datetime.now().strftime(\"%s\")", "print(\"Generating site catalog at: sites.yml\") LOCAL = \"local\" CONDOR_POOL =", "-> None: for p in d.iterdir(): if p.is_file(): f =", ".add_args( \"-in:file:s\", current_file.name, \"-out:prefix \" + current_file.name + \".\", \"-database", "\"\")) .add_inputs(File(current_file.name), *inputs) .add_outputs(File(current_file.name + \".score.sc\"), register_replica=True) .add_args( \"-in:file:s\", current_file.name,", "print(\"Generating pegasus.properties at: {}\".format(TOP_DIR / \"pegasus.properties\")) props = Properties() props[\"pegasus.dir.useTimestamp\"]", "file for registration jobs with (TOP_DIR / \"replicas.yml\").open(\"w\") as f:", "separate file for registration jobs with (TOP_DIR / \"replicas.yml\").open(\"w\") as", "\"\"\" pegasus: \"5.0\" sites: - name: \"condor_pool\" arch: \"x86_64\" os.type:", "catalog at: sites.yml\") LOCAL = \"local\" CONDOR_POOL = \"condorpool\" STAGING_SITE", "rc.add_replica(\"local\", current_file.name, str(current_file.resolve())) wf.add_jobs(job) # write rc to separate file", "operation: \"get\" url: \"http://workflow.isi.edu/shared-scratch/ptesting\" - operation: \"put\" url: \"gsiftp://workflow.isi.edu/lizard/scratch-90-days/http-scratch/ptesting\" -", "# add all files in 
minirosetta_database inputs = list() def", "except FileNotFoundError as e: print(\"Unable to find pegasus-config\") assert pegasus_config.returncode", "\"x86_64\" os.type: \"linux\" directories: - type: \"sharedScratch\" path: \"/lizard/scratch-90-days/http-scratch/ptesting\" fileServers:", "= \"true\" props[\"pegasus.dir.storage.deep\"] = \"false\" props[\"pegasus.data.configuration\"] = \"nonsharedfs\" with (TOP_DIR", "\"7\" directories: - type: \"sharedScratch\" path: \"{work_dir}/scratch\" fileServers: - operation:", "\"-pert_num 2\", \"-inner_num 1\", \"-jd2::ntrials 1\", ) ) rc.add_replica(\"local\", current_file.name,", "p.is_file(): f = File(str(p)) inputs.append(f) rc.add_replica(LOCAL, str(p), str(p.resolve())) else: get_files(p)", "type: \"localStorage\" path: \"{work_dir}/outputs\" fileServers: - operation: \"all\" url: \"file://{work_dir}/outputs\"", "name: \"condor_pool\" arch: \"x86_64\" os.type: \"linux\" profiles: condor: universe: \"vanilla\"", "f: f.write(sites) # --- Transformations ---------------------------------------------------------- rosetta_exe = Transformation( \"rosetta.exe\",", "0 PEGASUS_BIN_DIR = pegasus_config.stdout.decode().strip() sites = \"\"\" pegasus: \"5.0\" sites:", "\"-linmem_ig 10\", \"-nstruct 1\", \"-pert_num 2\", \"-inner_num 1\", \"-jd2::ntrials 1\",", "as f: rc.write(f) wf.add_transformation_catalog(tc) try: wf.plan( dir=str(WORK_DIR), verbose=5, sites=[CONDOR_POOL], staging_sites={CONDOR_POOL:", "Setup ----------------------------------------------------------- RUN_ID = \"024-sc4-gridftp-http-\" + datetime.now().strftime(\"%s\") TOP_DIR = Path.cwd()", "PEGASUS_BIN_DIR: \"{pegasus_bin_dir}\" \"\"\".format( work_dir=str(WORK_DIR), pegasus_bin_dir=PEGASUS_BIN_DIR ) with (TOP_DIR / \"sites.yml\").open(mode=\"w\")", "pass # --- Configuration ------------------------------------------------------------ print(\"Generating pegasus.properties at: {}\".format(TOP_DIR /", "get_files(p) get_files(Path(\"minirosetta_database\")) 
f1 = File(\"design.resfile\") inputs.append(f1) rc.add_replica(LOCAL, f1, str(Path(\"design.resfile\").resolve())) f2", "TOP_DIR = Path.cwd() WORK_DIR = TOP_DIR / \"work\" try: Path.mkdir(WORK_DIR)", "universe: \"vanilla\" pegasus: style: \"condor\" - name: \"staging_site\" arch: \"x86_64\"", "from datetime import datetime from Pegasus.api import * logging.basicConfig(level=logging.DEBUG) #", "(TOP_DIR / \"sites.yml\").open(mode=\"w\") as f: f.write(sites) # --- Transformations ----------------------------------------------------------", "\"rosetta.exe\"), is_stageable=True, ).add_pegasus_profile(clusters_size=3) tc = TransformationCatalog().add_transformations(rosetta_exe) # --- Replicas &", "os.type: \"linux\" profiles: condor: universe: \"vanilla\" pegasus: style: \"condor\" -", "get_files(Path(\"minirosetta_database\")) f1 = File(\"design.resfile\") inputs.append(f1) rc.add_replica(LOCAL, f1, str(Path(\"design.resfile\").resolve())) f2 =", "---------------------------------------------------------- rosetta_exe = Transformation( \"rosetta.exe\", arch=Arch.X86_64, os_type=OS.LINUX, site=\"local\", pfn=\"file://\" +", "Pegasus.api import * logging.basicConfig(level=logging.DEBUG) # --- Work Dir Setup -----------------------------------------------------------", "(TOP_DIR / \"pegasus.properties\").open(mode=\"w\") as f: props.write(f) # --- Sites --------------------------------------------------------------------", "--- Replicas & Workflow ------------------------------------------------------ rc = ReplicaCatalog() # add", "rc to separate file for registration jobs with (TOP_DIR /", "pfn=\"file://\" + str(TOP_DIR / \"rosetta.exe\"), is_stageable=True, ).add_pegasus_profile(clusters_size=3) tc = TransformationCatalog().add_transformations(rosetta_exe)", "= File(\"design.resfile\") inputs.append(f1) rc.add_replica(LOCAL, f1, str(Path(\"design.resfile\").resolve())) f2 = File(\"repack.resfile\") inputs.append(f2)", "pegasus-config\") assert pegasus_config.returncode 
== 0 PEGASUS_BIN_DIR = pegasus_config.stdout.decode().strip() sites =", "Path.cwd() WORK_DIR = TOP_DIR / \"work\" try: Path.mkdir(WORK_DIR) except FileExistsError:", "# --- Sites -------------------------------------------------------------------- print(\"Generating site catalog at: sites.yml\") LOCAL", "for p in d.iterdir(): if p.is_file(): f = File(str(p)) inputs.append(f)", "RUN_ID = \"024-sc4-gridftp-http-\" + datetime.now().strftime(\"%s\") TOP_DIR = Path.cwd() WORK_DIR =", "stdout=subprocess.PIPE, stderr=subprocess.PIPE ) except FileNotFoundError as e: print(\"Unable to find", "- operation: \"all\" url: \"file://{work_dir}/scratch\" - type: \"localStorage\" path: \"{work_dir}/outputs\"", "as f: props.write(f) # --- Sites -------------------------------------------------------------------- print(\"Generating site catalog", "verbose=5, sites=[CONDOR_POOL], staging_sites={CONDOR_POOL: STAGING_SITE}, ) except PegasusClientError as e: print(e.output)", "import logging import sys import subprocess from pathlib import Path", "= Transformation( \"rosetta.exe\", arch=Arch.X86_64, os_type=OS.LINUX, site=\"local\", pfn=\"file://\" + str(TOP_DIR /", "= TransformationCatalog().add_transformations(rosetta_exe) # --- Replicas & Workflow ------------------------------------------------------ rc =", "f2, str(Path(\"repack.resfile\").resolve())) wf = Workflow(\"rosetta\") pdb_files = list(Path(\"pdbs\").iterdir()) for i", "PEGASUS_BIN_DIR = pegasus_config.stdout.decode().strip() sites = \"\"\" pegasus: \"5.0\" sites: -", "register_replica=True) .add_args( \"-in:file:s\", current_file.name, \"-out:prefix \" + current_file.name + \".\",", "------------------------------------------------------------ print(\"Generating pegasus.properties at: {}\".format(TOP_DIR / \"pegasus.properties\")) props = Properties()", "/ \"work\" try: Path.mkdir(WORK_DIR) except FileExistsError: pass # --- Configuration", "------------------------------------------------------ rc = ReplicaCatalog() # add 
all files in minirosetta_database" ]
[ "dyncrit]) xsm, metrics = NLSsmoother(x0, crit, sys, solver_kwargs={'verbose': 2, 'tr_rho':", "t in range(T - 1): xs.append(sys.step(torch.tensor([0.] * B), xs[-1])) x", "in range(T - 1): xs.append(sys.step(torch.tensor([0.] * B), xs[-1])) x =", "'Smoothing Error: %.3e' % err print('Passed.') if __name__ == '__main__':", "3.]) C = torch.randn(2, 3) dt = 0.04 sys =", "err = float((xsm - x).norm()) assert err < 1e-8, 'Smoothing", "obscrit = GaussianObservationCriterion(torch.ones(2), t, y) dyncrit = GaussianDynamicsCriterion(torch.ones(3), t) #", "= 0.04 sys = LorenzAttractor(sigma, rho, beta, C, dt, method='midpoint')", "torch.zeros_like(x) obscrit = GaussianObservationCriterion(torch.ones(2), t, y) dyncrit = GaussianDynamicsCriterion(torch.ones(3), t)", "import torch from ceem.opt_criteria import * from ceem.systems import LorenzAttractor", "torch.arange(T)]).to(torch.get_default_dtype()) x0 = torch.zeros_like(x) obscrit = GaussianObservationCriterion(torch.ones(2), t, y) dyncrit", "1 T = 200 xs = [torch.randn(B, 1, 3)] for", "1): xs.append(sys.step(torch.tensor([0.] * B), xs[-1])) x = torch.cat(xs, dim=1).detach() x.requires_grad", "= torch.zeros_like(x) obscrit = GaussianObservationCriterion(torch.ones(2), t, y) dyncrit = GaussianDynamicsCriterion(torch.ones(3),", "* from ceem.smoother import * from ceem import utils def", "[torch.randn(B, 1, 3)] for t in range(T - 1): xs.append(sys.step(torch.tensor([0.]", "torch.randn(2, 3) dt = 0.04 sys = LorenzAttractor(sigma, rho, beta,", "B), xs[-1])) x = torch.cat(xs, dim=1).detach() x.requires_grad = True y", "* from ceem.systems import LorenzAttractor from ceem.dynamics import * from", "0.01 t = torch.stack([torch.arange(T), torch.arange(T)]).to(torch.get_default_dtype()) x0 = torch.zeros_like(x) obscrit =", "3)] for t in range(T - 1): xs.append(sys.step(torch.tensor([0.] 
* B),", "True y = sys.observe(0., x).detach() # y += torch.rand_like(y) *", "t, y) dyncrit = GaussianDynamicsCriterion(torch.ones(3), t) # Test GroupSOSCriterion crit", "crit = BlockSparseGroupSOSCriterion([obscrit, dyncrit]) xsm, metrics = NLSsmoother(torch.zeros_like(x), crit, sys)", "T = 200 xs = [torch.randn(B, 1, 3)] for t", "xsm, metrics = NLSsmoother(torch.zeros_like(x), crit, sys) err = float((xsm -", "torch.tensor([28.]) beta = torch.tensor([8. / 3.]) C = torch.randn(2, 3)", "C = torch.randn(2, 3) dt = 0.04 sys = LorenzAttractor(sigma,", "xs[-1])) x = torch.cat(xs, dim=1).detach() x.requires_grad = True y =", "3) dt = 0.04 sys = LorenzAttractor(sigma, rho, beta, C,", "sys.observe(0., x).detach() # y += torch.rand_like(y) * 0.01 t =", "- x).norm()) assert err < 1e-8, 'Smoothing Error: %.3e' %", "ceem.smoother import * from ceem import utils def test_smoother(): utils.set_rng_seed(1)", "+= torch.rand_like(y) * 0.01 t = torch.stack([torch.arange(T), torch.arange(T)]).to(torch.get_default_dtype()) x0 =", "dim=1).detach() x.requires_grad = True y = sys.observe(0., x).detach() # y", "range(T - 1): xs.append(sys.step(torch.tensor([0.] * B), xs[-1])) x = torch.cat(xs,", "< 1e-8, 'Smoothing Error: %.3e' % err print('Passed.') # Test", "# Test BlockSparseGroupSOSCriterion crit = BlockSparseGroupSOSCriterion([obscrit, dyncrit]) xsm, metrics =", "% err print('Passed.') # Test BlockSparseGroupSOSCriterion crit = BlockSparseGroupSOSCriterion([obscrit, dyncrit])", "torch from ceem.opt_criteria import * from ceem.systems import LorenzAttractor from", "= torch.tensor([8. 
/ 3.]) C = torch.randn(2, 3) dt =", "2, 'tr_rho': 0.}) err = float((xsm - x).norm()) assert err", "torch.stack([torch.arange(T), torch.arange(T)]).to(torch.get_default_dtype()) x0 = torch.zeros_like(x) obscrit = GaussianObservationCriterion(torch.ones(2), t, y)", "torch.cat(xs, dim=1).detach() x.requires_grad = True y = sys.observe(0., x).detach() #", "NLSsmoother(x0, crit, sys, solver_kwargs={'verbose': 2, 'tr_rho': 0.}) err = float((xsm", "x).norm()) assert err < 1e-8, 'Smoothing Error: %.3e' % err", "/ 3.]) C = torch.randn(2, 3) dt = 0.04 sys", "beta = torch.tensor([8. / 3.]) C = torch.randn(2, 3) dt", "y = sys.observe(0., x).detach() # y += torch.rand_like(y) * 0.01", "Test GroupSOSCriterion crit = GroupSOSCriterion([obscrit, dyncrit]) xsm, metrics = NLSsmoother(x0,", "= sys.observe(0., x).detach() # y += torch.rand_like(y) * 0.01 t", "t) # Test GroupSOSCriterion crit = GroupSOSCriterion([obscrit, dyncrit]) xsm, metrics", "= torch.cat(xs, dim=1).detach() x.requires_grad = True y = sys.observe(0., x).detach()", "crit = GroupSOSCriterion([obscrit, dyncrit]) xsm, metrics = NLSsmoother(x0, crit, sys,", "metrics = NLSsmoother(torch.zeros_like(x), crit, sys) err = float((xsm - x).norm())", "'tr_rho': 0.}) err = float((xsm - x).norm()) assert err <", "print('Passed.') # Test BlockSparseGroupSOSCriterion crit = BlockSparseGroupSOSCriterion([obscrit, dyncrit]) xsm, metrics", "1, 3)] for t in range(T - 1): xs.append(sys.step(torch.tensor([0.] *", "torch.tensor([8. 
/ 3.]) C = torch.randn(2, 3) dt = 0.04", "torch.rand_like(y) * 0.01 t = torch.stack([torch.arange(T), torch.arange(T)]).to(torch.get_default_dtype()) x0 = torch.zeros_like(x)", "= GaussianObservationCriterion(torch.ones(2), t, y) dyncrit = GaussianDynamicsCriterion(torch.ones(3), t) # Test", "err < 1e-8, 'Smoothing Error: %.3e' % err print('Passed.') if", "assert err < 1e-8, 'Smoothing Error: %.3e' % err print('Passed.')", "ceem.systems import LorenzAttractor from ceem.dynamics import * from ceem.smoother import", "'Smoothing Error: %.3e' % err print('Passed.') # Test BlockSparseGroupSOSCriterion crit", "err print('Passed.') # Test BlockSparseGroupSOSCriterion crit = BlockSparseGroupSOSCriterion([obscrit, dyncrit]) xsm,", "BlockSparseGroupSOSCriterion([obscrit, dyncrit]) xsm, metrics = NLSsmoother(torch.zeros_like(x), crit, sys) err =", "GaussianDynamicsCriterion(torch.ones(3), t) # Test GroupSOSCriterion crit = GroupSOSCriterion([obscrit, dyncrit]) xsm,", "GroupSOSCriterion crit = GroupSOSCriterion([obscrit, dyncrit]) xsm, metrics = NLSsmoother(x0, crit,", "float((xsm - x).norm()) assert err < 1e-8, 'Smoothing Error: %.3e'", "= 200 xs = [torch.randn(B, 1, 3)] for t in", "y += torch.rand_like(y) * 0.01 t = torch.stack([torch.arange(T), torch.arange(T)]).to(torch.get_default_dtype()) x0", "= BlockSparseGroupSOSCriterion([obscrit, dyncrit]) xsm, metrics = NLSsmoother(torch.zeros_like(x), crit, sys) err", "< 1e-8, 'Smoothing Error: %.3e' % err print('Passed.') if __name__", "= GaussianDynamicsCriterion(torch.ones(3), t) # Test GroupSOSCriterion crit = GroupSOSCriterion([obscrit, dyncrit])", "200 xs = [torch.randn(B, 1, 3)] for t in range(T", "metrics = NLSsmoother(x0, crit, sys, solver_kwargs={'verbose': 2, 'tr_rho': 0.}) err", "import * from ceem import utils def test_smoother(): utils.set_rng_seed(1) torch.set_default_dtype(torch.float64)", "= LorenzAttractor(sigma, rho, beta, C, dt, method='midpoint') B = 1", "rho, beta, C, dt, method='midpoint') B = 1 T =", "1e-8, 
'Smoothing Error: %.3e' % err print('Passed.') if __name__ ==", "ceem import utils def test_smoother(): utils.set_rng_seed(1) torch.set_default_dtype(torch.float64) sigma = torch.tensor([10.])", "torch.tensor([10.]) rho = torch.tensor([28.]) beta = torch.tensor([8. / 3.]) C", "sys) err = float((xsm - x).norm()) assert err < 1e-8,", "dt, method='midpoint') B = 1 T = 200 xs =", "test_smoother(): utils.set_rng_seed(1) torch.set_default_dtype(torch.float64) sigma = torch.tensor([10.]) rho = torch.tensor([28.]) beta", "NLSsmoother(torch.zeros_like(x), crit, sys) err = float((xsm - x).norm()) assert err", "B = 1 T = 200 xs = [torch.randn(B, 1,", "# Test GroupSOSCriterion crit = GroupSOSCriterion([obscrit, dyncrit]) xsm, metrics =", "GaussianObservationCriterion(torch.ones(2), t, y) dyncrit = GaussianDynamicsCriterion(torch.ones(3), t) # Test GroupSOSCriterion", "dyncrit = GaussianDynamicsCriterion(torch.ones(3), t) # Test GroupSOSCriterion crit = GroupSOSCriterion([obscrit,", "dt = 0.04 sys = LorenzAttractor(sigma, rho, beta, C, dt,", "dyncrit]) xsm, metrics = NLSsmoother(torch.zeros_like(x), crit, sys) err = float((xsm", "xs.append(sys.step(torch.tensor([0.] * B), xs[-1])) x = torch.cat(xs, dim=1).detach() x.requires_grad =", "import * from ceem.systems import LorenzAttractor from ceem.dynamics import *", "beta, C, dt, method='midpoint') B = 1 T = 200", "= True y = sys.observe(0., x).detach() # y += torch.rand_like(y)", "LorenzAttractor from ceem.dynamics import * from ceem.smoother import * from", "import utils def test_smoother(): utils.set_rng_seed(1) torch.set_default_dtype(torch.float64) sigma = torch.tensor([10.]) rho", "= 1 T = 200 xs = [torch.randn(B, 1, 3)]", "x.requires_grad = True y = sys.observe(0., x).detach() # y +=", "rho = torch.tensor([28.]) beta = torch.tensor([8. / 3.]) C =", "from ceem import utils def test_smoother(): utils.set_rng_seed(1) torch.set_default_dtype(torch.float64) sigma =", "= torch.tensor([28.]) beta = torch.tensor([8. 
/ 3.]) C = torch.randn(2,", "= torch.randn(2, 3) dt = 0.04 sys = LorenzAttractor(sigma, rho,", "%.3e' % err print('Passed.') # Test BlockSparseGroupSOSCriterion crit = BlockSparseGroupSOSCriterion([obscrit,", "ceem.dynamics import * from ceem.smoother import * from ceem import", "sigma = torch.tensor([10.]) rho = torch.tensor([28.]) beta = torch.tensor([8. /", "ceem.opt_criteria import * from ceem.systems import LorenzAttractor from ceem.dynamics import", "BlockSparseGroupSOSCriterion crit = BlockSparseGroupSOSCriterion([obscrit, dyncrit]) xsm, metrics = NLSsmoother(torch.zeros_like(x), crit,", "from ceem.dynamics import * from ceem.smoother import * from ceem", "utils.set_rng_seed(1) torch.set_default_dtype(torch.float64) sigma = torch.tensor([10.]) rho = torch.tensor([28.]) beta =", "sys = LorenzAttractor(sigma, rho, beta, C, dt, method='midpoint') B =", "Error: %.3e' % err print('Passed.') # Test BlockSparseGroupSOSCriterion crit =", "LorenzAttractor(sigma, rho, beta, C, dt, method='midpoint') B = 1 T", "* from ceem import utils def test_smoother(): utils.set_rng_seed(1) torch.set_default_dtype(torch.float64) sigma", "0.04 sys = LorenzAttractor(sigma, rho, beta, C, dt, method='midpoint') B", "* B), xs[-1])) x = torch.cat(xs, dim=1).detach() x.requires_grad = True", "= float((xsm - x).norm()) assert err < 1e-8, 'Smoothing Error:", "from ceem.systems import LorenzAttractor from ceem.dynamics import * from ceem.smoother", "torch.set_default_dtype(torch.float64) sigma = torch.tensor([10.]) rho = torch.tensor([28.]) beta = torch.tensor([8.", "sys, solver_kwargs={'verbose': 2, 'tr_rho': 0.}) err = float((xsm - x).norm())", "1e-8, 'Smoothing Error: %.3e' % err print('Passed.') # Test BlockSparseGroupSOSCriterion", "import * from ceem.smoother import * from ceem import utils", "# y += torch.rand_like(y) * 0.01 t = torch.stack([torch.arange(T), torch.arange(T)]).to(torch.get_default_dtype())", "from ceem.smoother import * from ceem import utils def test_smoother():", "x 
= torch.cat(xs, dim=1).detach() x.requires_grad = True y = sys.observe(0.,", "= torch.tensor([10.]) rho = torch.tensor([28.]) beta = torch.tensor([8. / 3.])", "for t in range(T - 1): xs.append(sys.step(torch.tensor([0.] * B), xs[-1]))", "def test_smoother(): utils.set_rng_seed(1) torch.set_default_dtype(torch.float64) sigma = torch.tensor([10.]) rho = torch.tensor([28.])", "= torch.stack([torch.arange(T), torch.arange(T)]).to(torch.get_default_dtype()) x0 = torch.zeros_like(x) obscrit = GaussianObservationCriterion(torch.ones(2), t,", "- 1): xs.append(sys.step(torch.tensor([0.] * B), xs[-1])) x = torch.cat(xs, dim=1).detach()", "import LorenzAttractor from ceem.dynamics import * from ceem.smoother import *", "xsm, metrics = NLSsmoother(x0, crit, sys, solver_kwargs={'verbose': 2, 'tr_rho': 0.})", "Error: %.3e' % err print('Passed.') if __name__ == '__main__': test_smoother()", "x).detach() # y += torch.rand_like(y) * 0.01 t = torch.stack([torch.arange(T),", "x0 = torch.zeros_like(x) obscrit = GaussianObservationCriterion(torch.ones(2), t, y) dyncrit =", "from ceem.opt_criteria import * from ceem.systems import LorenzAttractor from ceem.dynamics", "Test BlockSparseGroupSOSCriterion crit = BlockSparseGroupSOSCriterion([obscrit, dyncrit]) xsm, metrics = NLSsmoother(torch.zeros_like(x),", "method='midpoint') B = 1 T = 200 xs = [torch.randn(B,", "<reponame>sisl/CEEM import torch from ceem.opt_criteria import * from ceem.systems import", "t = torch.stack([torch.arange(T), torch.arange(T)]).to(torch.get_default_dtype()) x0 = torch.zeros_like(x) obscrit = GaussianObservationCriterion(torch.ones(2),", "y) dyncrit = GaussianDynamicsCriterion(torch.ones(3), t) # Test GroupSOSCriterion crit =", "solver_kwargs={'verbose': 2, 'tr_rho': 0.}) err = float((xsm - x).norm()) assert", "GroupSOSCriterion([obscrit, dyncrit]) xsm, metrics = NLSsmoother(x0, crit, sys, solver_kwargs={'verbose': 2,", "* 0.01 t = torch.stack([torch.arange(T), torch.arange(T)]).to(torch.get_default_dtype()) x0 
= torch.zeros_like(x) obscrit", "crit, sys, solver_kwargs={'verbose': 2, 'tr_rho': 0.}) err = float((xsm -", "C, dt, method='midpoint') B = 1 T = 200 xs", "0.}) err = float((xsm - x).norm()) assert err < 1e-8,", "= GroupSOSCriterion([obscrit, dyncrit]) xsm, metrics = NLSsmoother(x0, crit, sys, solver_kwargs={'verbose':", "= [torch.randn(B, 1, 3)] for t in range(T - 1):", "= NLSsmoother(torch.zeros_like(x), crit, sys) err = float((xsm - x).norm()) assert", "= NLSsmoother(x0, crit, sys, solver_kwargs={'verbose': 2, 'tr_rho': 0.}) err =", "crit, sys) err = float((xsm - x).norm()) assert err <", "xs = [torch.randn(B, 1, 3)] for t in range(T -", "utils def test_smoother(): utils.set_rng_seed(1) torch.set_default_dtype(torch.float64) sigma = torch.tensor([10.]) rho =", "err < 1e-8, 'Smoothing Error: %.3e' % err print('Passed.') #" ]
[ "or 'no-backend' self._dt = dt self._chan_freq_map = channel_frequency_map or dict()", "= temp_val.real # load qubit channel mapping qubit_channel_map = defaultdict(list)", "instance. \"\"\" configuration = backend.configuration() defaults = backend.defaults() # load", "in the root directory # of this source tree or", "return self._chan_freq_map.get(chan, None) class OpenPulseBackendInfo(DrawerBackendInfo): \"\"\"Drawing information of backend that", "works of this code must retain this # copyright notice,", "at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of", "must retain this # copyright notice, and modified files need", "from qiskit.providers import BaseBackend, BackendConfigurationError class DrawerBackendInfo(ABC): \"\"\"Backend information to", "range(configuration.n_qubits): try: qubit_channel_map[qind].extend(configuration.control(qubits=(qind, tind))) except BackendConfigurationError: pass return OpenPulseBackendInfo(name=name, dt=dt,", "provide the factory method which conforms to the associated provider.", "of Qiskit. # # (C) Copyright IBM 2020. # #", "self.backend_name = name or 'no-backend' self._dt = dt self._chan_freq_map =", "a pulse program without any specific information. Reference: - [1]", "backend.defaults() # load name name = backend.name() # load cycle", "load cycle time dt = configuration.dt # load frequencies chan_freqs", "the system cycle time. If those information are not provided,", "directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. #", "class may depend on providers, this abstract class has an", "configuration.dt # load frequencies chan_freqs = dict() chan_freqs.update({pulse.DriveChannel(qind): freq for", "factory method taking backends satisfying OpenPulse specification [1]. 
This class", "chan_freqs.update({pulse.MeasureChannel(qind): freq for qind, freq in enumerate(defaults.meas_freq_est)}) for qind, u_lo_mappers", "or dict() @classmethod @abstractmethod def create_from_backend(cls, backend: BaseBackend): \"\"\"Initialize a", "for simulator backend that only has a device Hamiltonian information.", "return self._dt def get_qubit_index(self, chan: pulse.channels.Channel) -> Union[int, None]: \"\"\"Get", "data and the drawer illustrates a pulse program without any", "Apache License, Version 2.0. You may # obtain a copy", "load frequencies chan_freqs = dict() chan_freqs.update({pulse.DriveChannel(qind): freq for qind, freq", "# load qubit channel mapping qubit_channel_map = defaultdict(list) for qind", "backend.configuration() defaults = backend.defaults() # load name name = backend.name()", "Optional[str] = None, dt: Optional[float] = None, channel_frequency_map: Optional[Dict[pulse.channels.Channel, float]]", "None) class OpenPulseBackendInfo(DrawerBackendInfo): \"\"\"Drawing information of backend that conforms to", "This class can be also initialized without the factory method", "for channel/qubit and channel/frequency along with the system cycle time.", "If those information are not provided, this class will be", "qubit_channel_map: Optional[Dict[int, List[pulse.channels.Channel]]] = None): \"\"\"Create new backend information. Args:", "qubit_channel_map[qind].append(configuration.measure(qubit=qind)) for tind in range(configuration.n_qubits): try: qubit_channel_map[qind].extend(configuration.control(qubits=(qind, tind))) except BackendConfigurationError:", "create_from_backend(cls, backend: BaseBackend): \"\"\"Initialize a class with backend information provided", "factory method which conforms to the associated provider. 
By default", "data generation.\"\"\" def __init__(self, name: Optional[str] = None, dt: Optional[float]", "may # obtain a copy of this license in the", "frequency of given channel object.\"\"\" return self._chan_freq_map.get(chan, None) class OpenPulseBackendInfo(DrawerBackendInfo):", "qiskit import pulse from qiskit.providers import BaseBackend, BackendConfigurationError class DrawerBackendInfo(ABC):", "qind in range(configuration.n_qubits): qubit_channel_map[qind].append(configuration.drive(qubit=qind)) qubit_channel_map[qind].append(configuration.measure(qubit=qind)) for tind in range(configuration.n_qubits): try:", "part of Qiskit. # # (C) Copyright IBM 2020. #", "# obtain a copy of this license in the LICENSE.txt", "abstract class :py:class:``DrawerBackendInfo`` with necessary methods to generate drawing objects.", "of given channel object.\"\"\" return self._chan_freq_map.get(chan, None) class OpenPulseBackendInfo(DrawerBackendInfo): \"\"\"Drawing", "backend information provided by provider. Args: backend: Backend object. \"\"\"", "qubit_channel_map: Mapping of qubit and associated channels. \"\"\" self.backend_name =", "backend information formatted to generate drawing data. This instance will", "time. If those information are not provided, this class will", "time.\"\"\" return self._dt def get_qubit_index(self, chan: pulse.channels.Channel) -> Union[int, None]:", "return qind return chan.index def get_channel_frequency(self, chan: pulse.channels.Channel) -> Union[float,", "`create_from_backend`. Each subclass should provide the factory method which conforms", "a set of empty data and the drawer illustrates a", "providers, this abstract class has an abstract factory method `create_from_backend`.", "derivative works of this code must retain this # copyright", "# # (C) Copyright IBM 2020. 
# # This code", "in range(configuration.n_qubits): try: qubit_channel_map[qind].extend(configuration.control(qubits=(qind, tind))) except BackendConfigurationError: pass return OpenPulseBackendInfo(name=name,", "OpenPulseBackendInfo(DrawerBackendInfo): \"\"\"Drawing information of backend that conforms to OpenPulse specification.\"\"\"", "pylint: disable=invalid-name \"\"\"A collection of backend information formatted to generate", "backend information. Args: name: Name of the backend. dt: System", "which conforms to the associated provider. By default we provide", "provided by provider. Args: backend: Backend object. Returns: OpenPulseBackendInfo: New", "and associated frequency. qubit_channel_map: Mapping of qubit and associated channels.", "in enumerate(defaults.qubit_freq_est)}) chan_freqs.update({pulse.MeasureChannel(qind): freq for qind, freq in enumerate(defaults.meas_freq_est)}) for", "they have been altered from the originals. # pylint: disable=invalid-name", "Backend object. Returns: OpenPulseBackendInfo: New configured instance. \"\"\" configuration =", "of given channel object.\"\"\" for qind, chans in self._qubit_channel_map.items(): if", "System cycle time. channel_frequency_map: Mapping of channel and associated frequency.", "originals. # pylint: disable=invalid-name \"\"\"A collection of backend information formatted", "generation.\"\"\" def __init__(self, name: Optional[str] = None, dt: Optional[float] =", "class :py:class:``DrawerBackendInfo`` with necessary methods to generate drawing objects. 
Because", "from typing import Dict, List, Union, Optional from qiskit import", "\"\"\" raise NotImplementedError @property def dt(self): \"\"\"Return cycle time.\"\"\" return", "for qind, freq in enumerate(defaults.meas_freq_est)}) for qind, u_lo_mappers in enumerate(configuration.u_channel_lo):", "backend that conforms to OpenPulse specification.\"\"\" @classmethod def create_from_backend(cls, backend:", "abc import ABC, abstractmethod from collections import defaultdict from typing", "self._chan_freq_map.get(chan, None) class OpenPulseBackendInfo(DrawerBackendInfo): \"\"\"Drawing information of backend that conforms", "backends satisfying OpenPulse specification [1]. This class can be also", "information of backend that conforms to OpenPulse specification.\"\"\" @classmethod def", "dt(self): \"\"\"Return cycle time.\"\"\" return self._dt def get_qubit_index(self, chan: pulse.channels.Channel)", "with a set of empty data and the drawer illustrates", "def dt(self): \"\"\"Return cycle time.\"\"\" return self._dt def get_qubit_index(self, chan:", "chan_freqs = dict() chan_freqs.update({pulse.DriveChannel(qind): freq for qind, freq in enumerate(defaults.qubit_freq_est)})", "= channel_frequency_map or dict() self._qubit_channel_map = qubit_channel_map or dict() @classmethod", "file in the root directory # of this source tree", "self._qubit_channel_map = qubit_channel_map or dict() @classmethod @abstractmethod def create_from_backend(cls, backend:", "with backend information provided by provider. Args: backend: Backend object.", "abstract class has an abstract factory method `create_from_backend`. Each subclass", "enumerate(configuration.u_channel_lo): temp_val = .0 + .0j for u_lo_mapper in u_lo_mappers:", "generate drawing data. This instance will be provided to generator", "factory method by manually specifying required information. This may be", "object. 
\"\"\" raise NotImplementedError @property def dt(self): \"\"\"Return cycle time.\"\"\"", "a notice indicating # that they have been altered from", "dict() self._qubit_channel_map = qubit_channel_map or dict() @classmethod @abstractmethod def create_from_backend(cls,", "copyright notice, and modified files need to carry a notice", "Reference: - [1] Qiskit Backend Specifications for OpenQASM and OpenPulse", "get_channel_frequency(self, chan: pulse.channels.Channel) -> Union[float, None]: \"\"\"Get frequency of given", "channel_frequency_map: Optional[Dict[pulse.channels.Channel, float]] = None, qubit_channel_map: Optional[Dict[int, List[pulse.channels.Channel]]] = None):", "New configured instance. \"\"\" configuration = backend.configuration() defaults = backend.defaults()", "self._qubit_channel_map.items(): if chan in chans: return qind return chan.index def", "or derivative works of this code must retain this #", "def get_channel_frequency(self, chan: pulse.channels.Channel) -> Union[float, None]: \"\"\"Get frequency of", "initialized without the factory method by manually specifying required information.", "dict() @classmethod @abstractmethod def create_from_backend(cls, backend: BaseBackend): \"\"\"Initialize a class", "= name or 'no-backend' self._dt = dt self._chan_freq_map = channel_frequency_map", "channel_frequency_map or dict() self._qubit_channel_map = qubit_channel_map or dict() @classmethod @abstractmethod", "__init__(self, name: Optional[str] = None, dt: Optional[float] = None, channel_frequency_map:", "of channel and associated frequency. qubit_channel_map: Mapping of qubit and", "Backend object. \"\"\" raise NotImplementedError @property def dt(self): \"\"\"Return cycle", "to the associated provider. By default we provide :py:class:``OpenPulseBackendInfo`` class", "program without any specific information. 
Reference: - [1] Qiskit Backend", "= configuration.dt # load frequencies chan_freqs = dict() chan_freqs.update({pulse.DriveChannel(qind): freq", "# load cycle time dt = configuration.dt # load frequencies", "two mapping objects for channel/qubit and channel/frequency along with the", "information. Args: name: Name of the backend. dt: System cycle", "by manually specifying required information. This may be convenient for", "altered from the originals. # pylint: disable=invalid-name \"\"\"A collection of", "cycle time. If those information are not provided, this class", "that they have been altered from the originals. # pylint:", "provider. By default we provide :py:class:``OpenPulseBackendInfo`` class that has the", "Any modifications or derivative works of this code must retain", "in chans: return qind return chan.index def get_channel_frequency(self, chan: pulse.channels.Channel)", "modifications or derivative works of this code must retain this", "an abstract class :py:class:``DrawerBackendInfo`` with necessary methods to generate drawing", ":py:class:``DrawerBackendInfo`` with necessary methods to generate drawing objects. Because the", "files need to carry a notice indicating # that they", "= None): \"\"\"Create new backend information. Args: name: Name of", "channel and associated frequency. qubit_channel_map: Mapping of qubit and associated", "channel/qubit and channel/frequency along with the system cycle time. If", "be provided to generator functions. The module provides an abstract", "and modified files need to carry a notice indicating #", ".0 + .0j for u_lo_mapper in u_lo_mappers: temp_val += defaults.qubit_freq_est[u_lo_mapper.q]", "default we provide :py:class:``OpenPulseBackendInfo`` class that has the factory method", "manually specifying required information. 
This may be convenient for visualizing", "initialized with a set of empty data and the drawer", "\"\"\" from abc import ABC, abstractmethod from collections import defaultdict", "information to be used for the drawing data generation.\"\"\" def", "class DrawerBackendInfo(ABC): \"\"\"Backend information to be used for the drawing", "information provided by provider. Args: backend: Backend object. \"\"\" raise", "OpenPulseBackendInfo: New configured instance. \"\"\" configuration = backend.configuration() defaults =", "for the drawing data generation.\"\"\" def __init__(self, name: Optional[str] =", "typing import Dict, List, Union, Optional from qiskit import pulse", "information. Reference: - [1] Qiskit Backend Specifications for OpenQASM and", "in self._qubit_channel_map.items(): if chan in chans: return qind return chan.index", "class will be initialized with a set of empty data", "the associated provider. By default we provide :py:class:``OpenPulseBackendInfo`` class that", "be used for the drawing data generation.\"\"\" def __init__(self, name:", "a copy of this license in the LICENSE.txt file in", "# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # #", "without any specific information. Reference: - [1] Qiskit Backend Specifications", "name: Name of the backend. dt: System cycle time. channel_frequency_map:", "Optional[Dict[pulse.channels.Channel, float]] = None, qubit_channel_map: Optional[Dict[int, List[pulse.channels.Channel]]] = None): \"\"\"Create", "Mapping of channel and associated frequency. qubit_channel_map: Mapping of qubit", "visualizing a pulse program for simulator backend that only has", "an abstract factory method `create_from_backend`. Each subclass should provide the", "have been altered from the originals. 
# pylint: disable=invalid-name \"\"\"A", "u_lo_mappers: temp_val += defaults.qubit_freq_est[u_lo_mapper.q] * complex(*u_lo_mapper.scale) chan_freqs[pulse.ControlChannel(qind)] = temp_val.real #", "freq for qind, freq in enumerate(defaults.meas_freq_est)}) for qind, u_lo_mappers in", "illustrates a pulse program without any specific information. Reference: -", "\"\"\"Initialize a class with backend information provided by provider. Args:", "# # This code is licensed under the Apache License,", "enumerate(defaults.qubit_freq_est)}) chan_freqs.update({pulse.MeasureChannel(qind): freq for qind, freq in enumerate(defaults.meas_freq_est)}) for qind,", "Args: name: Name of the backend. dt: System cycle time.", "for qind, u_lo_mappers in enumerate(configuration.u_channel_lo): temp_val = .0 + .0j", "the data structure of backend class may depend on providers,", "cycle time dt = configuration.dt # load frequencies chan_freqs =", "backend that only has a device Hamiltonian information. This requires", "defaultdict(list) for qind in range(configuration.n_qubits): qubit_channel_map[qind].append(configuration.drive(qubit=qind)) qubit_channel_map[qind].append(configuration.measure(qubit=qind)) for tind in", "for OpenQASM and OpenPulse Experiments, https://arxiv.org/abs/1809.03452 \"\"\" from abc import", "You may # obtain a copy of this license in", "for qind, chans in self._qubit_channel_map.items(): if chan in chans: return", "defaults = backend.defaults() # load name name = backend.name() #", "# # Any modifications or derivative works of this code", "channel_frequency_map: Mapping of channel and associated frequency. 
qubit_channel_map: Mapping of", "we provide :py:class:``OpenPulseBackendInfo`` class that has the factory method taking", "= dict() chan_freqs.update({pulse.DriveChannel(qind): freq for qind, freq in enumerate(defaults.qubit_freq_est)}) chan_freqs.update({pulse.MeasureChannel(qind):", "carry a notice indicating # that they have been altered", "to OpenPulse specification.\"\"\" @classmethod def create_from_backend(cls, backend: BaseBackend): \"\"\"Initialize a", "temp_val = .0 + .0j for u_lo_mapper in u_lo_mappers: temp_val", "class can be also initialized without the factory method by", "notice indicating # that they have been altered from the", "Union[int, None]: \"\"\"Get associated qubit index of given channel object.\"\"\"", "collections import defaultdict from typing import Dict, List, Union, Optional", "abstract factory method `create_from_backend`. Each subclass should provide the factory", "without the factory method by manually specifying required information. This", "the factory method by manually specifying required information. This may", "this license in the LICENSE.txt file in the root directory", "information formatted to generate drawing data. This instance will be", "chan_freqs.update({pulse.DriveChannel(qind): freq for qind, freq in enumerate(defaults.qubit_freq_est)}) chan_freqs.update({pulse.MeasureChannel(qind): freq for", "the root directory # of this source tree or at", "= None, channel_frequency_map: Optional[Dict[pulse.channels.Channel, float]] = None, qubit_channel_map: Optional[Dict[int, List[pulse.channels.Channel]]]", "dt: System cycle time. 
channel_frequency_map: Mapping of channel and associated", "BaseBackend): \"\"\"Initialize a class with backend information provided by provider.", "for qind in range(configuration.n_qubits): qubit_channel_map[qind].append(configuration.drive(qubit=qind)) qubit_channel_map[qind].append(configuration.measure(qubit=qind)) for tind in range(configuration.n_qubits):", "information are not provided, this class will be initialized with", "qiskit.providers import BaseBackend, BackendConfigurationError class DrawerBackendInfo(ABC): \"\"\"Backend information to be", "import defaultdict from typing import Dict, List, Union, Optional from", "the Apache License, Version 2.0. You may # obtain a", "in u_lo_mappers: temp_val += defaults.qubit_freq_est[u_lo_mapper.q] * complex(*u_lo_mapper.scale) chan_freqs[pulse.ControlChannel(qind)] = temp_val.real", "associated channels. \"\"\" self.backend_name = name or 'no-backend' self._dt =", "@abstractmethod def create_from_backend(cls, backend: BaseBackend): \"\"\"Initialize a class with backend", "has the factory method taking backends satisfying OpenPulse specification [1].", "OpenPulse Experiments, https://arxiv.org/abs/1809.03452 \"\"\" from abc import ABC, abstractmethod from", "conforms to the associated provider. By default we provide :py:class:``OpenPulseBackendInfo``", "retain this # copyright notice, and modified files need to", "load qubit channel mapping qubit_channel_map = defaultdict(list) for qind in", "qind return chan.index def get_channel_frequency(self, chan: pulse.channels.Channel) -> Union[float, None]:", "the factory method which conforms to the associated provider. 
By", "OpenPulse specification.\"\"\" @classmethod def create_from_backend(cls, backend: BaseBackend): \"\"\"Initialize a class", "List, Union, Optional from qiskit import pulse from qiskit.providers import", "that conforms to OpenPulse specification.\"\"\" @classmethod def create_from_backend(cls, backend: BaseBackend):", "Mapping of qubit and associated channels. \"\"\" self.backend_name = name", "channels. \"\"\" self.backend_name = name or 'no-backend' self._dt = dt", "provider. Args: backend: Backend object. \"\"\" raise NotImplementedError @property def", "Union[float, None]: \"\"\"Get frequency of given channel object.\"\"\" return self._chan_freq_map.get(chan,", "dt = configuration.dt # load frequencies chan_freqs = dict() chan_freqs.update({pulse.DriveChannel(qind):", "BaseBackend, BackendConfigurationError class DrawerBackendInfo(ABC): \"\"\"Backend information to be used for", "taking backends satisfying OpenPulse specification [1]. This class can be", "license in the LICENSE.txt file in the root directory #", "Specifications for OpenQASM and OpenPulse Experiments, https://arxiv.org/abs/1809.03452 \"\"\" from abc", "This instance will be provided to generator functions. The module", "data. This instance will be provided to generator functions. The", "will be provided to generator functions. The module provides an", "set of empty data and the drawer illustrates a pulse", "is licensed under the Apache License, Version 2.0. You may", "with necessary methods to generate drawing objects. 
Because the data", "modified files need to carry a notice indicating # that", "can be also initialized without the factory method by manually", "qubit_channel_map[qind].extend(configuration.control(qubits=(qind, tind))) except BackendConfigurationError: pass return OpenPulseBackendInfo(name=name, dt=dt, channel_frequency_map=chan_freqs, qubit_channel_map=qubit_channel_map)", "need to carry a notice indicating # that they have", "from abc import ABC, abstractmethod from collections import defaultdict from", "chans in self._qubit_channel_map.items(): if chan in chans: return qind return", "if chan in chans: return qind return chan.index def get_channel_frequency(self,", "frequencies chan_freqs = dict() chan_freqs.update({pulse.DriveChannel(qind): freq for qind, freq in", "those information are not provided, this class will be initialized", "qubit index of given channel object.\"\"\" for qind, chans in", "of backend information formatted to generate drawing data. This instance", "def create_from_backend(cls, backend: BaseBackend): \"\"\"Initialize a class with backend information", "mapping qubit_channel_map = defaultdict(list) for qind in range(configuration.n_qubits): qubit_channel_map[qind].append(configuration.drive(qubit=qind)) qubit_channel_map[qind].append(configuration.measure(qubit=qind))", "associated provider. By default we provide :py:class:``OpenPulseBackendInfo`` class that has", "return chan.index def get_channel_frequency(self, chan: pulse.channels.Channel) -> Union[float, None]: \"\"\"Get", "Each subclass should provide the factory method which conforms to", "'no-backend' self._dt = dt self._chan_freq_map = channel_frequency_map or dict() self._qubit_channel_map", "a class with backend information provided by provider. Args: backend:", "provided, this class will be initialized with a set of", "this abstract class has an abstract factory method `create_from_backend`. Each", "to generate drawing data. 
This instance will be provided to", "= None, qubit_channel_map: Optional[Dict[int, List[pulse.channels.Channel]]] = None): \"\"\"Create new backend", "chan_freqs[pulse.ControlChannel(qind)] = temp_val.real # load qubit channel mapping qubit_channel_map =", "and the drawer illustrates a pulse program without any specific", "Copyright IBM 2020. # # This code is licensed under", "def get_qubit_index(self, chan: pulse.channels.Channel) -> Union[int, None]: \"\"\"Get associated qubit", "root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.", "NotImplementedError @property def dt(self): \"\"\"Return cycle time.\"\"\" return self._dt def", "information. This may be convenient for visualizing a pulse program", "drawing data generation.\"\"\" def __init__(self, name: Optional[str] = None, dt:", "frequency. qubit_channel_map: Mapping of qubit and associated channels. \"\"\" self.backend_name", "may depend on providers, this abstract class has an abstract", "subclass should provide the factory method which conforms to the", "\"\"\"Return cycle time.\"\"\" return self._dt def get_qubit_index(self, chan: pulse.channels.Channel) ->", "# that they have been altered from the originals. #", "the drawing data generation.\"\"\" def __init__(self, name: Optional[str] = None,", "= .0 + .0j for u_lo_mapper in u_lo_mappers: temp_val +=", "to be used for the drawing data generation.\"\"\" def __init__(self,", "disable=invalid-name \"\"\"A collection of backend information formatted to generate drawing", "associated qubit index of given channel object.\"\"\" for qind, chans", "u_lo_mapper in u_lo_mappers: temp_val += defaults.qubit_freq_est[u_lo_mapper.q] * complex(*u_lo_mapper.scale) chan_freqs[pulse.ControlChannel(qind)] =", "@classmethod @abstractmethod def create_from_backend(cls, backend: BaseBackend): \"\"\"Initialize a class with", "ABC, abstractmethod from collections import defaultdict from typing import Dict,", "the backend. dt: System cycle time. 
channel_frequency_map: Mapping of channel", "object.\"\"\" for qind, chans in self._qubit_channel_map.items(): if chan in chans:", "be also initialized without the factory method by manually specifying", "qubit_channel_map[qind].append(configuration.drive(qubit=qind)) qubit_channel_map[qind].append(configuration.measure(qubit=qind)) for tind in range(configuration.n_qubits): try: qubit_channel_map[qind].extend(configuration.control(qubits=(qind, tind))) except", "[1] Qiskit Backend Specifications for OpenQASM and OpenPulse Experiments, https://arxiv.org/abs/1809.03452", "class with backend information provided by provider. Args: backend: Backend", "specifying required information. This may be convenient for visualizing a", "on providers, this abstract class has an abstract factory method", "qubit and associated channels. \"\"\" self.backend_name = name or 'no-backend'", "import pulse from qiskit.providers import BaseBackend, BackendConfigurationError class DrawerBackendInfo(ABC): \"\"\"Backend", "pulse.channels.Channel) -> Union[float, None]: \"\"\"Get frequency of given channel object.\"\"\"", "range(configuration.n_qubits): qubit_channel_map[qind].append(configuration.drive(qubit=qind)) qubit_channel_map[qind].append(configuration.measure(qubit=qind)) for tind in range(configuration.n_qubits): try: qubit_channel_map[qind].extend(configuration.control(qubits=(qind, tind)))", "channel object.\"\"\" return self._chan_freq_map.get(chan, None) class OpenPulseBackendInfo(DrawerBackendInfo): \"\"\"Drawing information of", "Experiments, https://arxiv.org/abs/1809.03452 \"\"\" from abc import ABC, abstractmethod from collections", "Optional[float] = None, channel_frequency_map: Optional[Dict[pulse.channels.Channel, float]] = None, qubit_channel_map: Optional[Dict[int,", "The module provides an abstract class :py:class:``DrawerBackendInfo`` with necessary methods", "# load name name = backend.name() # load cycle time", "conforms to OpenPulse specification.\"\"\" 
@classmethod def create_from_backend(cls, backend: BaseBackend): \"\"\"Initialize", "instance will be provided to generator functions. The module provides", "LICENSE.txt file in the root directory # of this source", "chan in chans: return qind return chan.index def get_channel_frequency(self, chan:", "indicating # that they have been altered from the originals.", "Dict, List, Union, Optional from qiskit import pulse from qiskit.providers", "OpenQASM and OpenPulse Experiments, https://arxiv.org/abs/1809.03452 \"\"\" from abc import ABC,", "that has the factory method taking backends satisfying OpenPulse specification", "chans: return qind return chan.index def get_channel_frequency(self, chan: pulse.channels.Channel) ->", "given channel object.\"\"\" return self._chan_freq_map.get(chan, None) class OpenPulseBackendInfo(DrawerBackendInfo): \"\"\"Drawing information", "backend information provided by provider. Args: backend: Backend object. Returns:", "enumerate(defaults.meas_freq_est)}) for qind, u_lo_mappers in enumerate(configuration.u_channel_lo): temp_val = .0 +", "import BaseBackend, BackendConfigurationError class DrawerBackendInfo(ABC): \"\"\"Backend information to be used", "freq in enumerate(defaults.qubit_freq_est)}) chan_freqs.update({pulse.MeasureChannel(qind): freq for qind, freq in enumerate(defaults.meas_freq_est)})", "This code is part of Qiskit. # # (C) Copyright", "licensed under the Apache License, Version 2.0. 
You may #", "BackendConfigurationError class DrawerBackendInfo(ABC): \"\"\"Backend information to be used for the", "\"\"\" self.backend_name = name or 'no-backend' self._dt = dt self._chan_freq_map", "load name name = backend.name() # load cycle time dt", "channel object.\"\"\" for qind, chans in self._qubit_channel_map.items(): if chan in", "This requires two mapping objects for channel/qubit and channel/frequency along", "-> Union[int, None]: \"\"\"Get associated qubit index of given channel", "the LICENSE.txt file in the root directory # of this", "Returns: OpenPulseBackendInfo: New configured instance. \"\"\" configuration = backend.configuration() defaults", "= dt self._chan_freq_map = channel_frequency_map or dict() self._qubit_channel_map = qubit_channel_map", "Version 2.0. You may # obtain a copy of this", "* complex(*u_lo_mapper.scale) chan_freqs[pulse.ControlChannel(qind)] = temp_val.real # load qubit channel mapping", "backend: Backend object. \"\"\" raise NotImplementedError @property def dt(self): \"\"\"Return", "configuration = backend.configuration() defaults = backend.defaults() # load name name", "tind in range(configuration.n_qubits): try: qubit_channel_map[qind].extend(configuration.control(qubits=(qind, tind))) except BackendConfigurationError: pass return", "given channel object.\"\"\" for qind, chans in self._qubit_channel_map.items(): if chan", "from qiskit import pulse from qiskit.providers import BaseBackend, BackendConfigurationError class", "drawing data. This instance will be provided to generator functions.", "None]: \"\"\"Get associated qubit index of given channel object.\"\"\" for", "qind, freq in enumerate(defaults.meas_freq_est)}) for qind, u_lo_mappers in enumerate(configuration.u_channel_lo): temp_val", "been altered from the originals. # pylint: disable=invalid-name \"\"\"A collection", "of this license in the LICENSE.txt file in the root", "along with the system cycle time. If those information are", "cycle time. 
channel_frequency_map: Mapping of channel and associated frequency. qubit_channel_map:", "import Dict, List, Union, Optional from qiskit import pulse from", "in enumerate(configuration.u_channel_lo): temp_val = .0 + .0j for u_lo_mapper in", "or dict() self._qubit_channel_map = qubit_channel_map or dict() @classmethod @abstractmethod def", "DrawerBackendInfo(ABC): \"\"\"Backend information to be used for the drawing data", "to generate drawing objects. Because the data structure of backend", "information. This requires two mapping objects for channel/qubit and channel/frequency", "to generator functions. The module provides an abstract class :py:class:``DrawerBackendInfo``", "tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative", "requires two mapping objects for channel/qubit and channel/frequency along with", "method `create_from_backend`. Each subclass should provide the factory method which", "float]] = None, qubit_channel_map: Optional[Dict[int, List[pulse.channels.Channel]]] = None): \"\"\"Create new", "2020. # # This code is licensed under the Apache", "be initialized with a set of empty data and the", "new backend information. Args: name: Name of the backend. dt:", "of this code must retain this # copyright notice, and", "temp_val += defaults.qubit_freq_est[u_lo_mapper.q] * complex(*u_lo_mapper.scale) chan_freqs[pulse.ControlChannel(qind)] = temp_val.real # load", "method by manually specifying required information. This may be convenient", "dt self._chan_freq_map = channel_frequency_map or dict() self._qubit_channel_map = qubit_channel_map or", "notice, and modified files need to carry a notice indicating", "associated frequency. qubit_channel_map: Mapping of qubit and associated channels. 
\"\"\"", "get_qubit_index(self, chan: pulse.channels.Channel) -> Union[int, None]: \"\"\"Get associated qubit index", "+= defaults.qubit_freq_est[u_lo_mapper.q] * complex(*u_lo_mapper.scale) chan_freqs[pulse.ControlChannel(qind)] = temp_val.real # load qubit", "License, Version 2.0. You may # obtain a copy of", "drawing objects. Because the data structure of backend class may", "def __init__(self, name: Optional[str] = None, dt: Optional[float] = None,", "empty data and the drawer illustrates a pulse program without", "system cycle time. If those information are not provided, this", "pulse program for simulator backend that only has a device", "= backend.defaults() # load name name = backend.name() # load", "OpenPulse specification [1]. This class can be also initialized without", "factory method `create_from_backend`. Each subclass should provide the factory method", "obtain a copy of this license in the LICENSE.txt file", "a device Hamiltonian information. This requires two mapping objects for", "simulator backend that only has a device Hamiltonian information. This", "http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this", "data structure of backend class may depend on providers, this", "only has a device Hamiltonian information. This requires two mapping", "time dt = configuration.dt # load frequencies chan_freqs = dict()", "# load frequencies chan_freqs = dict() chan_freqs.update({pulse.DriveChannel(qind): freq for qind,", "name = backend.name() # load cycle time dt = configuration.dt", "2.0. You may # obtain a copy of this license", "any specific information. Reference: - [1] Qiskit Backend Specifications for", "object. Returns: OpenPulseBackendInfo: New configured instance. \"\"\" configuration = backend.configuration()", "that only has a device Hamiltonian information. 
This requires two", "object.\"\"\" return self._chan_freq_map.get(chan, None) class OpenPulseBackendInfo(DrawerBackendInfo): \"\"\"Drawing information of backend", "the originals. # pylint: disable=invalid-name \"\"\"A collection of backend information", "should provide the factory method which conforms to the associated", "channel mapping qubit_channel_map = defaultdict(list) for qind in range(configuration.n_qubits): qubit_channel_map[qind].append(configuration.drive(qubit=qind))", "for tind in range(configuration.n_qubits): try: qubit_channel_map[qind].extend(configuration.control(qubits=(qind, tind))) except BackendConfigurationError: pass", "qubit_channel_map or dict() @classmethod @abstractmethod def create_from_backend(cls, backend: BaseBackend): \"\"\"Initialize", "class that has the factory method taking backends satisfying OpenPulse", "Backend Specifications for OpenQASM and OpenPulse Experiments, https://arxiv.org/abs/1809.03452 \"\"\" from", "defaults.qubit_freq_est[u_lo_mapper.q] * complex(*u_lo_mapper.scale) chan_freqs[pulse.ControlChannel(qind)] = temp_val.real # load qubit channel", "the drawer illustrates a pulse program without any specific information.", "backend: Backend object. Returns: OpenPulseBackendInfo: New configured instance. \"\"\" configuration", "for qind, freq in enumerate(defaults.qubit_freq_est)}) chan_freqs.update({pulse.MeasureChannel(qind): freq for qind, freq", "freq in enumerate(defaults.meas_freq_est)}) for qind, u_lo_mappers in enumerate(configuration.u_channel_lo): temp_val =", "also initialized without the factory method by manually specifying required", "this code must retain this # copyright notice, and modified", "by provider. Args: backend: Backend object. 
Returns: OpenPulseBackendInfo: New configured", "# copyright notice, and modified files need to carry a", "structure of backend class may depend on providers, this abstract", "Because the data structure of backend class may depend on", "List[pulse.channels.Channel]]] = None): \"\"\"Create new backend information. Args: name: Name", "with the system cycle time. If those information are not", "drawer illustrates a pulse program without any specific information. Reference:", "This code is licensed under the Apache License, Version 2.0.", "specification.\"\"\" @classmethod def create_from_backend(cls, backend: BaseBackend): \"\"\"Initialize a class with", "by provider. Args: backend: Backend object. \"\"\" raise NotImplementedError @property", "in enumerate(defaults.meas_freq_est)}) for qind, u_lo_mappers in enumerate(configuration.u_channel_lo): temp_val = .0", "provided by provider. Args: backend: Backend object. \"\"\" raise NotImplementedError", "provider. Args: backend: Backend object. Returns: OpenPulseBackendInfo: New configured instance.", "Args: backend: Backend object. \"\"\" raise NotImplementedError @property def dt(self):", "class has an abstract factory method `create_from_backend`. Each subclass should", "of backend class may depend on providers, this abstract class", "None, channel_frequency_map: Optional[Dict[pulse.channels.Channel, float]] = None, qubit_channel_map: Optional[Dict[int, List[pulse.channels.Channel]]] =", "functions. The module provides an abstract class :py:class:``DrawerBackendInfo`` with necessary", "\"\"\"Backend information to be used for the drawing data generation.\"\"\"", "+ .0j for u_lo_mapper in u_lo_mappers: temp_val += defaults.qubit_freq_est[u_lo_mapper.q] *", "Qiskit Backend Specifications for OpenQASM and OpenPulse Experiments, https://arxiv.org/abs/1809.03452 \"\"\"", "of the backend. dt: System cycle time. channel_frequency_map: Mapping of", "is part of Qiskit. # # (C) Copyright IBM 2020.", "backend. 
dt: System cycle time. channel_frequency_map: Mapping of channel and", "be convenient for visualizing a pulse program for simulator backend", "abstractmethod from collections import defaultdict from typing import Dict, List,", "configured instance. \"\"\" configuration = backend.configuration() defaults = backend.defaults() #", "None): \"\"\"Create new backend information. Args: name: Name of the", "pulse.channels.Channel) -> Union[int, None]: \"\"\"Get associated qubit index of given", "specification [1]. This class can be also initialized without the", "None, dt: Optional[float] = None, channel_frequency_map: Optional[Dict[pulse.channels.Channel, float]] = None,", "for visualizing a pulse program for simulator backend that only", "may be convenient for visualizing a pulse program for simulator", ":py:class:``OpenPulseBackendInfo`` class that has the factory method taking backends satisfying", "chan: pulse.channels.Channel) -> Union[int, None]: \"\"\"Get associated qubit index of", "# This code is part of Qiskit. # # (C)", "complex(*u_lo_mapper.scale) chan_freqs[pulse.ControlChannel(qind)] = temp_val.real # load qubit channel mapping qubit_channel_map", "\"\"\" configuration = backend.configuration() defaults = backend.defaults() # load name", "# pylint: disable=invalid-name \"\"\"A collection of backend information formatted to", "and OpenPulse Experiments, https://arxiv.org/abs/1809.03452 \"\"\" from abc import ABC, abstractmethod", "used for the drawing data generation.\"\"\" def __init__(self, name: Optional[str]", "dt: Optional[float] = None, channel_frequency_map: Optional[Dict[pulse.channels.Channel, float]] = None, qubit_channel_map:", "generate drawing objects. Because the data structure of backend class", "\"\"\"Create new backend information. Args: name: Name of the backend.", "has a device Hamiltonian information. This requires two mapping objects", "specific information. 
Reference: - [1] Qiskit Backend Specifications for OpenQASM", "of qubit and associated channels. \"\"\" self.backend_name = name or", "= None, dt: Optional[float] = None, channel_frequency_map: Optional[Dict[pulse.channels.Channel, float]] =", "This may be convenient for visualizing a pulse program for", "qind, u_lo_mappers in enumerate(configuration.u_channel_lo): temp_val = .0 + .0j for", "methods to generate drawing objects. Because the data structure of", "\"\"\"Get frequency of given channel object.\"\"\" return self._chan_freq_map.get(chan, None) class", "program for simulator backend that only has a device Hamiltonian", "source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or", "for u_lo_mapper in u_lo_mappers: temp_val += defaults.qubit_freq_est[u_lo_mapper.q] * complex(*u_lo_mapper.scale) chan_freqs[pulse.ControlChannel(qind)]", "Optional from qiskit import pulse from qiskit.providers import BaseBackend, BackendConfigurationError", "code must retain this # copyright notice, and modified files", "freq for qind, freq in enumerate(defaults.qubit_freq_est)}) chan_freqs.update({pulse.MeasureChannel(qind): freq for qind,", "the factory method taking backends satisfying OpenPulse specification [1]. 
This", "try: qubit_channel_map[qind].extend(configuration.control(qubits=(qind, tind))) except BackendConfigurationError: pass return OpenPulseBackendInfo(name=name, dt=dt, channel_frequency_map=chan_freqs,", "chan.index def get_channel_frequency(self, chan: pulse.channels.Channel) -> Union[float, None]: \"\"\"Get frequency", "module provides an abstract class :py:class:``DrawerBackendInfo`` with necessary methods to", "pulse from qiskit.providers import BaseBackend, BackendConfigurationError class DrawerBackendInfo(ABC): \"\"\"Backend information", "\"\"\"Drawing information of backend that conforms to OpenPulse specification.\"\"\" @classmethod", "u_lo_mappers in enumerate(configuration.u_channel_lo): temp_val = .0 + .0j for u_lo_mapper", "objects. Because the data structure of backend class may depend", "[1]. This class can be also initialized without the factory", "self._dt = dt self._chan_freq_map = channel_frequency_map or dict() self._qubit_channel_map =", "https://arxiv.org/abs/1809.03452 \"\"\" from abc import ABC, abstractmethod from collections import", ".0j for u_lo_mapper in u_lo_mappers: temp_val += defaults.qubit_freq_est[u_lo_mapper.q] * complex(*u_lo_mapper.scale)", "mapping objects for channel/qubit and channel/frequency along with the system", "pulse program without any specific information. Reference: - [1] Qiskit", "\"\"\"Get associated qubit index of given channel object.\"\"\" for qind,", "qubit_channel_map = defaultdict(list) for qind in range(configuration.n_qubits): qubit_channel_map[qind].append(configuration.drive(qubit=qind)) qubit_channel_map[qind].append(configuration.measure(qubit=qind)) for", "under the Apache License, Version 2.0. You may # obtain", "necessary methods to generate drawing objects. 
Because the data structure", "= backend.configuration() defaults = backend.defaults() # load name name =", "in range(configuration.n_qubits): qubit_channel_map[qind].append(configuration.drive(qubit=qind)) qubit_channel_map[qind].append(configuration.measure(qubit=qind)) for tind in range(configuration.n_qubits): try: qubit_channel_map[qind].extend(configuration.control(qubits=(qind,", "name name = backend.name() # load cycle time dt =", "from the originals. # pylint: disable=invalid-name \"\"\"A collection of backend", "method which conforms to the associated provider. By default we", "a pulse program for simulator backend that only has a", "or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works", "chan: pulse.channels.Channel) -> Union[float, None]: \"\"\"Get frequency of given channel", "depend on providers, this abstract class has an abstract factory", "class OpenPulseBackendInfo(DrawerBackendInfo): \"\"\"Drawing information of backend that conforms to OpenPulse", "# This code is licensed under the Apache License, Version", "provided to generator functions. The module provides an abstract class", "IBM 2020. # # This code is licensed under the", "Hamiltonian information. This requires two mapping objects for channel/qubit and", "self._chan_freq_map = channel_frequency_map or dict() self._qubit_channel_map = qubit_channel_map or dict()", "this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications", "# (C) Copyright IBM 2020. 
# # This code is", "provides an abstract class :py:class:``DrawerBackendInfo`` with necessary methods to generate", "qubit channel mapping qubit_channel_map = defaultdict(list) for qind in range(configuration.n_qubits):", "index of given channel object.\"\"\" for qind, chans in self._qubit_channel_map.items():", "qind, chans in self._qubit_channel_map.items(): if chan in chans: return qind", "raise NotImplementedError @property def dt(self): \"\"\"Return cycle time.\"\"\" return self._dt", "import ABC, abstractmethod from collections import defaultdict from typing import", "time. channel_frequency_map: Mapping of channel and associated frequency. qubit_channel_map: Mapping", "temp_val.real # load qubit channel mapping qubit_channel_map = defaultdict(list) for", "to carry a notice indicating # that they have been", "Args: backend: Backend object. Returns: OpenPulseBackendInfo: New configured instance. \"\"\"", "@property def dt(self): \"\"\"Return cycle time.\"\"\" return self._dt def get_qubit_index(self,", "\"\"\"A collection of backend information formatted to generate drawing data.", "device Hamiltonian information. This requires two mapping objects for channel/qubit", "Optional[Dict[int, List[pulse.channels.Channel]]] = None): \"\"\"Create new backend information. Args: name:", "defaultdict from typing import Dict, List, Union, Optional from qiskit", "are not provided, this class will be initialized with a", "Qiskit. # # (C) Copyright IBM 2020. # # This", "- [1] Qiskit Backend Specifications for OpenQASM and OpenPulse Experiments,", "backend: BaseBackend): \"\"\"Initialize a class with backend information provided by", "and associated channels. \"\"\" self.backend_name = name or 'no-backend' self._dt", "not provided, this class will be initialized with a set", "channel/frequency along with the system cycle time. If those information", "= backend.name() # load cycle time dt = configuration.dt #", "code is part of Qiskit. 
# # (C) Copyright IBM", "will be initialized with a set of empty data and", "this class will be initialized with a set of empty", "-> Union[float, None]: \"\"\"Get frequency of given channel object.\"\"\" return", "of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any", "provide :py:class:``OpenPulseBackendInfo`` class that has the factory method taking backends", "name or 'no-backend' self._dt = dt self._chan_freq_map = channel_frequency_map or", "of empty data and the drawer illustrates a pulse program", "= qubit_channel_map or dict() @classmethod @abstractmethod def create_from_backend(cls, backend: BaseBackend):", "= defaultdict(list) for qind in range(configuration.n_qubits): qubit_channel_map[qind].append(configuration.drive(qubit=qind)) qubit_channel_map[qind].append(configuration.measure(qubit=qind)) for tind", "code is licensed under the Apache License, Version 2.0. You", "@classmethod def create_from_backend(cls, backend: BaseBackend): \"\"\"Initialize a class with backend", "from collections import defaultdict from typing import Dict, List, Union,", "(C) Copyright IBM 2020. # # This code is licensed", "backend class may depend on providers, this abstract class has", "objects for channel/qubit and channel/frequency along with the system cycle", "satisfying OpenPulse specification [1]. This class can be also initialized", "generator functions. The module provides an abstract class :py:class:``DrawerBackendInfo`` with", "copy of this license in the LICENSE.txt file in the", "has an abstract factory method `create_from_backend`. Each subclass should provide", "None]: \"\"\"Get frequency of given channel object.\"\"\" return self._chan_freq_map.get(chan, None)", "method taking backends satisfying OpenPulse specification [1]. This class can", "and channel/frequency along with the system cycle time. If those", "Name of the backend. dt: System cycle time. channel_frequency_map: Mapping", "information provided by provider. 
Args: backend: Backend object. Returns: OpenPulseBackendInfo:", "required information. This may be convenient for visualizing a pulse", "in the LICENSE.txt file in the root directory # of", "dict() chan_freqs.update({pulse.DriveChannel(qind): freq for qind, freq in enumerate(defaults.qubit_freq_est)}) chan_freqs.update({pulse.MeasureChannel(qind): freq", "# Any modifications or derivative works of this code must", "this # copyright notice, and modified files need to carry", "collection of backend information formatted to generate drawing data. This", "formatted to generate drawing data. This instance will be provided", "By default we provide :py:class:``OpenPulseBackendInfo`` class that has the factory", "of backend that conforms to OpenPulse specification.\"\"\" @classmethod def create_from_backend(cls,", "self._dt def get_qubit_index(self, chan: pulse.channels.Channel) -> Union[int, None]: \"\"\"Get associated", "convenient for visualizing a pulse program for simulator backend that", "name: Optional[str] = None, dt: Optional[float] = None, channel_frequency_map: Optional[Dict[pulse.channels.Channel,", "None, qubit_channel_map: Optional[Dict[int, List[pulse.channels.Channel]]] = None): \"\"\"Create new backend information.", "cycle time.\"\"\" return self._dt def get_qubit_index(self, chan: pulse.channels.Channel) -> Union[int,", "backend.name() # load cycle time dt = configuration.dt # load", "Union, Optional from qiskit import pulse from qiskit.providers import BaseBackend,", "qind, freq in enumerate(defaults.qubit_freq_est)}) chan_freqs.update({pulse.MeasureChannel(qind): freq for qind, freq in" ]
[ "'-mf_rating') return queryset def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) refresh_url", "return context class MfundListView_Reco(ListView): model = Mfund def get_queryset(self): queryset", "def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id).order_by('-mf_nav_value') return queryset def get_context_data(self, **kwargs):", "model = Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_category',", "context[\"refresh_url\"] = refresh_url return context class MfundListView_AMC_Amount(ListView): model = Mfund", "get(self, request): self.mfund_refresh(request) return HttpResponseRedirect(reverse(\"mfund-list\")) def __init__(self): super(MfundRefreshView, self).__init__() def", "plot_div_1 = plot(fig, output_type='div', include_plotlyjs=False) context['plot_div_1'] = plot_div_1 return context", "from django.utils.decorators import method_decorator from django.views.generic.list import ListView from django.views", "model = Mfund def get_queryset(self): self.queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). 
\\ values('mf_amc').annotate(scheme_sum=Sum('mf_nav_value')).", "for q_row in self.queryset: sum_total += q_row['scheme_sum'] labels_values_dict[q_row['mf_amc']] = q_row['scheme_sum']", "\\ exclude(scheme_sum=0.0).order_by('-scheme_sum') return self.queryset def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs)", "redirect from django.contrib.auth.decorators import login_required from django.utils.decorators import method_decorator from", "for q_row in self.queryset: sum_total += q_row['scheme_sum'] labels_values_dict[q_row['mf_subcat']] = q_row['scheme_sum']", "q_row['scheme_sum'] labels_values_dict[q_row['mf_subcat']] = q_row['scheme_sum'] context['sum_total'] = int(sum_total) print('labels values dict',", "1 # declaring template # first delete all existing mfund", "import method_decorator from django.views.generic.list import ListView from django.views import View", "= refresh_url return context class MfundListView_AMC_Amount(ListView): model = Mfund def", "class MfundRefreshView(View): debug_level = 1 def get(self, request): self.mfund_refresh(request) return", "dict', labels_values_dict) for k, v in sorted(labels_values_dict.items(), key=lambda item: item[1]):", "django.db.models import Q from django.conf import settings from django.shortcuts import", "queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). 
\\ order_by('mf_amc', 'mf_category', 'mf_subcat', '-mf_nav_value') return queryset", "super(MfundListView, self).dispatch(*args, **kwargs) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) refresh_url", "return context class MfundListView_SubcatAmount(ListView): model = Mfund def get_queryset(self): self.queryset", "request): debug_level = 1 # declaring template # first delete", "unique_id += 1 print(brec.bim_amc, brec.bim_name, brec.bim_category, brec.bim_subcat) print(brec.bim_rating, brec.bim_units, brec.bim_cost_value,", "model = Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_research_reco',", "', max_mf_id) unique_id = max_mf_id for brec in BrokerIcidirMf.objects.all().filter(bim_user_id=request.user.id): unique_id", "Mfund.objects.update_or_create( mf_id=unique_id, mf_user_id=request.user.id, mf_broker='icidir', mf_amc=brec.bim_amc, mf_name=brec.bim_name, mf_category=brec.bim_category, mf_subcat=brec.bim_subcat, mf_rating=brec.bim_rating, mf_cost_value=brec.bim_cost_value,", "get_queryset(self): self.queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ values('mf_subcat').annotate(scheme_sum=Sum('mf_nav_value')). \\ exclude(scheme_sum=0.0).order_by('-scheme_sum') return self.queryset", "return context class MfundListView_Amount(ListView): model = Mfund def get_queryset(self): queryset", "= Mfund def get_queryset(self): self.queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ values('mf_amc').annotate(scheme_sum=Sum('mf_nav_value')). 
\\", "get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id) return queryset @method_decorator(login_required) def dispatch(self, *args,", "django.conf import settings from django.shortcuts import redirect from django.contrib.auth.decorators import", "plotly.tools import make_subplots from django.db.models import Q from django.conf import", "k, v in sorted(labels_values_dict.items(), key=lambda item: item[1]): labels.append(k) values.append(v) print('labels", "django.contrib import messages from django.urls import reverse from django.http import", "# declaring template # first delete all existing mfund objects", "1 def get(self, request): self.mfund_refresh(request) return HttpResponseRedirect(reverse(\"mfund-list\")) def __init__(self): super(MfundRefreshView,", "print('max_mf_id ', max_mf_id) unique_id = max_mf_id for brec in BrokerIcidirMf.objects.all().filter(bim_user_id=request.user.id):", "= Mfund_url() context[\"refresh_url\"] = refresh_url return context class MfundListView_AMC_Amount(ListView): model", "max_mf_id = max_id_instances['max_id'] print('DS: found max id ', max_mf_id) if", "get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_category', 'mf_subcat', '-mf_nav_value') return queryset", "is desired # paginate_by = 300 # filter_backends = [filters.OrderingFilter,]", "fig.update_traces(textposition='inside', textinfo='percent+label') # fig.show() plot_div_1 = plot(fig, output_type='div', include_plotlyjs=False) context['plot_div_1']", "textinfo='percent+label') # fig.show() plot_div_1 = plot(fig, output_type='div', include_plotlyjs=False) context['plot_div_1'] =", "= Mfund.objects.all().filter(mf_user_id=self.request.user.id). 
\\ order_by('mf_amc', 'mf_category', 'mf_subcat', '-mf_nav_value') return queryset def", "lastrefd_update from django_gotolong.broker.icidir.imf.models import BrokerIcidirMf def Mfund_url(): return \"unused-mfund-refresh-url\" class", "Sum, Max, Min from django.db.models.functions import Trim, Lower, Round import", "from plotly.offline import plot from plotly.tools import make_subplots from django.db.models", "from django.contrib.auth.decorators import login_required from django.utils.decorators import method_decorator from django.views.generic.list", "Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id).order_by('-mf_nav_value') return queryset def get_context_data(self,", "= Mfund.objects.all().filter(mf_user_id=self.request.user.id).order_by('-mf_nav_value') return queryset def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs)", "mf_id=unique_id, mf_user_id=request.user.id, mf_broker='icidir', mf_amc=brec.bim_amc, mf_name=brec.bim_name, mf_category=brec.bim_category, mf_subcat=brec.bim_subcat, mf_rating=brec.bim_rating, mf_cost_value=brec.bim_cost_value, mf_nav_value=brec.bim_nav_value,", "HttpResponseRedirect from django_gotolong.lastrefd.models import Lastrefd, lastrefd_update from django_gotolong.broker.icidir.imf.models import BrokerIcidirMf", "context[\"refresh_url\"] = refresh_url return context class MfundListView_Reco(ListView): model = Mfund", "import BrokerIcidirMf def Mfund_url(): return \"unused-mfund-refresh-url\" class MfundListView(ListView): model =", "= Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_amc', 'mf_category',", "refresh_url = Mfund_url() context[\"refresh_url\"] = refresh_url return context class MfundListView_AMC_Amount(ListView):", "model = Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). 
\\ order_by('mf_subcat',", "refresh_url return context class MfundListView_AMC_Amount(ListView): model = Mfund def get_queryset(self):", "print(brec.bim_rating, brec.bim_units, brec.bim_cost_value, brec.bim_nav_value) print(brec.bim_research_reco) # skip 0 units if", "return context class MfundListView_AMC_Amount(ListView): model = Mfund def get_queryset(self): self.queryset", "if int(float(brec.bim_units)) != 0: _, created = Mfund.objects.update_or_create( mf_id=unique_id, mf_user_id=request.user.id,", "MfundListView_Subcat(ListView): model = Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\", "get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) refresh_url = Mfund_url() context[\"refresh_url\"] =", "from django.db.models import OuterRef, Subquery, Count, Sum, Max, Min from", "ListView from django.views import View from django.db.models import OuterRef, Subquery,", "values = [] labels_values_dict = {} sum_total = 0 for", "io import openpyxl from django.contrib import messages from django.urls import", "__init__(self): super(MfundRefreshView, self).__init__() def mfund_refresh(self, request): debug_level = 1 #", ".models import Mfund import plotly.graph_objects as go from plotly.offline import", "django.urls import reverse from django.http import HttpResponseRedirect from django_gotolong.lastrefd.models import", "return queryset def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) refresh_url =", "output_type='div', include_plotlyjs=False) context['plot_div_1'] = plot_div_1 return context class MfundListView_Category(ListView): model", "_, created = Mfund.objects.update_or_create( mf_id=unique_id, mf_user_id=request.user.id, mf_broker='icidir', mf_amc=brec.bim_amc, mf_name=brec.bim_name, mf_category=brec.bim_category,", "objects Mfund.objects.all().filter(mf_user_id=request.user.id).delete() max_id_instances = Mfund.objects.aggregate(max_id=Max('mf_id')) 
max_mf_id = max_id_instances['max_id'] print('DS: found", "labels.append(k) values.append(v) print('labels ', labels) print('values ', values) fig =", "messages from django.urls import reverse from django.http import HttpResponseRedirect from", "class MfundListView_Reco(ListView): model = Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id).", "refresh_url = Mfund_url() context[\"refresh_url\"] = refresh_url return context class MfundListView_Reco(ListView):", "refresh_url = Mfund_url() context[\"refresh_url\"] = refresh_url return context class MfundListView_Subcat(ListView):", "import plot from plotly.tools import make_subplots from django.db.models import Q", "\\ exclude(scheme_sum=0.0).order_by('-scheme_sum') print('hi ', self.queryset) return self.queryset def get_context_data(self, **kwargs):", "mfund objects Mfund.objects.all().filter(mf_user_id=request.user.id).delete() max_id_instances = Mfund.objects.aggregate(max_id=Max('mf_id')) max_mf_id = max_id_instances['max_id'] print('DS:", "values=values)]) fig.update_traces(textposition='inside', textinfo='percent+label') # fig.show() plot_div_1 = plot(fig, output_type='div', include_plotlyjs=False)", "context = super().get_context_data(**kwargs) refresh_url = Mfund_url() context[\"refresh_url\"] = refresh_url return", "key=lambda item: item[1]): labels.append(k) values.append(v) print('labels ', labels) print('values ',", "in self.queryset: sum_total += q_row['scheme_sum'] labels_values_dict[q_row['mf_subcat']] = q_row['scheme_sum'] context['sum_total'] =", "openpyxl from django.contrib import messages from django.urls import reverse from", "import login_required from django.utils.decorators import method_decorator from django.views.generic.list import ListView", "Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). 
\\ order_by('mf_category', 'mf_subcat', '-mf_nav_value')", "= 0 for q_row in self.queryset: sum_total += q_row['scheme_sum'] labels_values_dict[q_row['mf_subcat']]", "import csv, io import openpyxl from django.contrib import messages from", "Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_category', 'mf_subcat', '-mf_nav_value') return queryset def get_context_data(self, **kwargs):", "sorted(labels_values_dict.items(), key=lambda item: item[1]): labels.append(k) values.append(v) print('labels ', labels) print('values", "self.queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ values('mf_subcat').annotate(scheme_sum=Sum('mf_nav_value')). \\ exclude(scheme_sum=0.0).order_by('-scheme_sum') return self.queryset def", "# Create your views here. from .models import Mfund import", "def __init__(self): super(MfundRefreshView, self).__init__() def mfund_refresh(self, request): debug_level = 1", "django.contrib.auth.decorators import login_required from django.utils.decorators import method_decorator from django.views.generic.list import", "get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) labels = [] values =", "context class MfundListView_Subcat(ListView): model = Mfund def get_queryset(self): queryset =", "\\ order_by('mf_research_reco', '-mf_rating') return queryset def get_context_data(self, **kwargs): context =", "[] labels_values_dict = {} sum_total = 0 for q_row in", "brec.bim_subcat) print(brec.bim_rating, brec.bim_units, brec.bim_cost_value, brec.bim_nav_value) print(brec.bim_research_reco) # skip 0 units", "from plotly.tools import make_subplots from django.db.models import Q from django.conf", "= plot_div_1 return context class MfundListView_Category(ListView): model = Mfund def", "import HttpResponseRedirect from django_gotolong.lastrefd.models import Lastrefd, lastrefd_update from django_gotolong.broker.icidir.imf.models import", "brec.bim_category, brec.bim_subcat) 
print(brec.bim_rating, brec.bim_units, brec.bim_cost_value, brec.bim_nav_value) print(brec.bim_research_reco) # skip 0", "pandas as pd import csv, io import openpyxl from django.contrib", "values('mf_amc').annotate(scheme_sum=Sum('mf_nav_value')). \\ exclude(scheme_sum=0.0).order_by('-scheme_sum') print('hi ', self.queryset) return self.queryset def get_context_data(self,", "import openpyxl from django.contrib import messages from django.urls import reverse", "mfund_refresh(self, request): debug_level = 1 # declaring template # first", "exclude(scheme_sum=0.0).order_by('-scheme_sum') print('hi ', self.queryset) return self.queryset def get_context_data(self, **kwargs): context", "sum_total += q_row['scheme_sum'] labels_values_dict[q_row['mf_subcat']] = q_row['scheme_sum'] context['sum_total'] = int(sum_total) print('labels", "Mfund_url() context[\"refresh_url\"] = refresh_url return context class MfundListView_AMC(ListView): model =", "for brec in BrokerIcidirMf.objects.all().filter(bim_user_id=request.user.id): unique_id += 1 print(brec.bim_amc, brec.bim_name, brec.bim_category,", "print('labels ', labels) print('values ', values) fig = go.Figure(data=[go.Pie(labels=labels, values=values)])", "refresh_url return context class MfundListView_Amount(ListView): model = Mfund def get_queryset(self):", "# filter_backends = [filters.OrderingFilter,] # ordering_fields = ['sno', 'nse_symbol'] def", "= super().get_context_data(**kwargs) refresh_url = Mfund_url() context[\"refresh_url\"] = refresh_url return context", "from .models import Mfund import plotly.graph_objects as go from plotly.offline", "MfundListView_AMC_Amount(ListView): model = Mfund def get_queryset(self): self.queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). 
\\", "context[\"refresh_url\"] = refresh_url return context class MfundListView_Amount(ListView): model = Mfund", "refresh_url return context class MfundListView_SubcatAmount(ListView): model = Mfund def get_queryset(self):", "# if pagination is desired # paginate_by = 300 #", "values dict', labels_values_dict) for k, v in sorted(labels_values_dict.items(), key=lambda item:", "make_subplots from django.db.models import Q from django.conf import settings from", "\\ order_by('mf_amc', 'mf_category', 'mf_subcat', '-mf_nav_value') return queryset def get_context_data(self, **kwargs):", "values.append(v) print('labels ', labels) print('values ', values) fig = go.Figure(data=[go.Pie(labels=labels,", "plot(fig, output_type='div', include_plotlyjs=False) context['plot_div_1'] = plot_div_1 return context class MfundListView_Category(ListView):", "first delete all existing mfund objects Mfund.objects.all().filter(mf_user_id=request.user.id).delete() max_id_instances = Mfund.objects.aggregate(max_id=Max('mf_id'))", "max_mf_id) unique_id = max_mf_id for brec in BrokerIcidirMf.objects.all().filter(bim_user_id=request.user.id): unique_id +=", "paginate_by = 300 # filter_backends = [filters.OrderingFilter,] # ordering_fields =", "id ', max_mf_id) if max_mf_id is None: max_mf_id = 0", "BrokerIcidirMf.objects.all().filter(bim_user_id=request.user.id): unique_id += 1 print(brec.bim_amc, brec.bim_name, brec.bim_category, brec.bim_subcat) print(brec.bim_rating, brec.bim_units,", "**kwargs): context = super().get_context_data(**kwargs) refresh_url = Mfund_url() context[\"refresh_url\"] = refresh_url", "queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). 
\\ order_by('mf_subcat', '-mf_nav_value') return queryset def get_context_data(self,", "max id ', max_mf_id) if max_mf_id is None: max_mf_id =", "from django.contrib import messages from django.urls import reverse from django.http", "class MfundListView_Amount(ListView): model = Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id).order_by('-mf_nav_value')", "return super(MfundListView, self).dispatch(*args, **kwargs) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs)", "Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_amc', 'mf_category', 'mf_subcat', '-mf_nav_value') return queryset def get_context_data(self,", "Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_subcat', '-mf_nav_value') return queryset def get_context_data(self, **kwargs): context", "= Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_research_reco', '-mf_rating') return queryset def get_context_data(self, **kwargs):", "import messages from django.urls import reverse from django.http import HttpResponseRedirect", "\\ values('mf_subcat').annotate(scheme_sum=Sum('mf_nav_value')). 
\\ exclude(scheme_sum=0.0).order_by('-scheme_sum') return self.queryset def get_context_data(self, **kwargs): context", "labels_values_dict[q_row['mf_subcat']] = q_row['scheme_sum'] context['sum_total'] = int(sum_total) print('labels values dict', labels_values_dict)", "# import pdb # pdb.set_trace() # Updated Gfundareco objects lastrefd_update(\"mfund\")", "MfundRefreshView(View): debug_level = 1 def get(self, request): self.mfund_refresh(request) return HttpResponseRedirect(reverse(\"mfund-list\"))", "dispatch(self, *args, **kwargs): return super(MfundListView, self).dispatch(*args, **kwargs) def get_context_data(self, **kwargs):", "context class MfundListView_AMC_Amount(ListView): model = Mfund def get_queryset(self): self.queryset =", "= [filters.OrderingFilter,] # ordering_fields = ['sno', 'nse_symbol'] def get_queryset(self): queryset", "Min from django.db.models.functions import Trim, Lower, Round import pandas as", "= Mfund.objects.aggregate(max_id=Max('mf_id')) max_mf_id = max_id_instances['max_id'] print('DS: found max id ',", "get_queryset(self): self.queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ values('mf_amc').annotate(scheme_sum=Sum('mf_nav_value')). 
\\ exclude(scheme_sum=0.0).order_by('-scheme_sum') print('hi ',", "return context class MfundListView_AMC(ListView): model = Mfund def get_queryset(self): queryset", "= Mfund # if pagination is desired # paginate_by =", "output_type='div', include_plotlyjs=False) context['plot_div_1'] = plot_div_1 return context class MfundRefreshView(View): debug_level", "go.Figure(data=[go.Pie(labels=labels, values=values)]) fig.update_traces(textposition='inside', textinfo='percent+label') # fig.show() plot_div_1 = plot(fig, output_type='div',", "Trim, Lower, Round import pandas as pd import csv, io", "brec in BrokerIcidirMf.objects.all().filter(bim_user_id=request.user.id): unique_id += 1 print(brec.bim_amc, brec.bim_name, brec.bim_category, brec.bim_subcat)", "'mf_category', 'mf_subcat', '-mf_nav_value') return queryset def get_context_data(self, **kwargs): context =", "values('mf_subcat').annotate(scheme_sum=Sum('mf_nav_value')). \\ exclude(scheme_sum=0.0).order_by('-scheme_sum') return self.queryset def get_context_data(self, **kwargs): context =", "brec.bim_nav_value) print(brec.bim_research_reco) # skip 0 units if int(float(brec.bim_units)) != 0:", "django.views import View from django.db.models import OuterRef, Subquery, Count, Sum,", "int(sum_total) print('labels values dict', labels_values_dict) for k, v in sorted(labels_values_dict.items(),", "delete all existing mfund objects Mfund.objects.all().filter(mf_user_id=request.user.id).delete() max_id_instances = Mfund.objects.aggregate(max_id=Max('mf_id')) max_mf_id", "plot from plotly.tools import make_subplots from django.db.models import Q from", "', labels) print('values ', values) fig = go.Figure(data=[go.Pie(labels=labels, values=values)]) fig.update_traces(textposition='inside',", "pagination is desired # paginate_by = 300 # filter_backends =", "debug_level = 1 # declaring template # first delete all", "1 print(brec.bim_amc, brec.bim_name, brec.bim_category, brec.bim_subcat) print(brec.bim_rating, brec.bim_units, 
brec.bim_cost_value, brec.bim_nav_value) print(brec.bim_research_reco)", "= 300 # filter_backends = [filters.OrderingFilter,] # ordering_fields = ['sno',", "context['plot_div_1'] = plot_div_1 return context class MfundRefreshView(View): debug_level = 1", "mf_cost_value=brec.bim_cost_value, mf_nav_value=brec.bim_nav_value, mf_research_reco=brec.bim_research_reco ) # breakpoint() # import pdb #", "import Q from django.conf import settings from django.shortcuts import redirect", "max_id_instances['max_id'] print('DS: found max id ', max_mf_id) if max_mf_id is", "def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_amc', 'mf_category', 'mf_subcat', '-mf_nav_value')", "import OuterRef, Subquery, Count, Sum, Max, Min from django.db.models.functions import", "found max id ', max_mf_id) if max_mf_id is None: max_mf_id", "sum_total = 0 for q_row in self.queryset: sum_total += q_row['scheme_sum']", "\\ order_by('mf_category', 'mf_subcat', '-mf_nav_value') return queryset def get_context_data(self, **kwargs): context", "= 1 # declaring template # first delete all existing", "import reverse from django.http import HttpResponseRedirect from django_gotolong.lastrefd.models import Lastrefd,", "model = Mfund def get_queryset(self): self.queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). 
class MfundListView_Amount(ListView):
    """The user's holdings ordered by current NAV value, largest first."""
    model = Mfund

    def get_queryset(self):
        owned = Mfund.objects.all().filter(mf_user_id=self.request.user.id)
        return owned.order_by('-mf_nav_value')

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # Link target for the "refresh holdings" action in the template.
        context["refresh_url"] = Mfund_url()
        return context
class MfundListView_AMC(ListView):
    """Holdings grouped for display by AMC, then category and
    sub-category, with NAV value descending within each group."""
    model = Mfund

    def get_queryset(self):
        ordering = ('mf_amc', 'mf_category', 'mf_subcat', '-mf_nav_value')
        user_rows = Mfund.objects.all().filter(mf_user_id=self.request.user.id)
        return user_rows.order_by(*ordering)

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["refresh_url"] = Mfund_url()
        return context
class MfundListView_AMC_Amount(ListView):
    """Per-AMC totals of the user's holdings, rendered as a Plotly pie chart.

    ``get_queryset`` stores the aggregated queryset on ``self.queryset`` so
    ``get_context_data`` can reuse it without issuing a second query.
    """
    model = Mfund

    def get_queryset(self):
        # Sum NAV value per AMC; drop AMCs whose total is zero.
        # (Leftover debug print of the queryset removed.)
        self.queryset = Mfund.objects.filter(mf_user_id=self.request.user.id). \
            values('mf_amc').annotate(scheme_sum=Sum('mf_nav_value')). \
            exclude(scheme_sum=0.0).order_by('-scheme_sum')
        return self.queryset

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        labels_values_dict = {}
        sum_total = 0
        for q_row in self.queryset:
            sum_total += q_row['scheme_sum']
            labels_values_dict[q_row['mf_amc']] = q_row['scheme_sum']
        context['sum_total'] = int(sum_total)
        # Pie slices sorted by ascending amount, as before.
        # (Debug prints of labels/values removed.)
        labels = []
        values = []
        for amc, amount in sorted(labels_values_dict.items(),
                                  key=lambda item: item[1]):
            labels.append(amc)
            values.append(amount)
        fig = go.Figure(data=[go.Pie(labels=labels, values=values)])
        fig.update_traces(textposition='inside', textinfo='percent+label')
        # fig.show()
        context['plot_div_1'] = plot(fig, output_type='div',
                                     include_plotlyjs=False)
        return context
class MfundListView_Category(ListView):
    """Holdings sorted by category and sub-category, NAV value descending."""
    model = Mfund

    def get_queryset(self):
        user_funds = Mfund.objects.all().filter(mf_user_id=self.request.user.id)
        return user_funds.order_by('mf_category', 'mf_subcat', '-mf_nav_value')

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["refresh_url"] = Mfund_url()
        return context
class MfundListView_Subcat(ListView):
    """Holdings sorted by sub-category, NAV value descending."""
    model = Mfund

    def get_queryset(self):
        by_subcat = ('mf_subcat', '-mf_nav_value')
        return Mfund.objects.all().filter(
            mf_user_id=self.request.user.id).order_by(*by_subcat)

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["refresh_url"] = Mfund_url()
        return context
class MfundListView_Reco(ListView):
    """Holdings sorted by research recommendation, then rating descending."""
    model = Mfund

    def get_queryset(self):
        user_rows = Mfund.objects.all().filter(mf_user_id=self.request.user.id)
        return user_rows.order_by('mf_research_reco', '-mf_rating')

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["refresh_url"] = Mfund_url()
        return context
class MfundListView_SubcatAmount(ListView):
    """Per-sub-category totals of the user's holdings, as a Plotly pie chart.

    Mirrors ``MfundListView_AMC_Amount`` but aggregates on ``mf_subcat``.
    ``get_queryset`` stores the aggregated queryset on ``self.queryset`` so
    ``get_context_data`` can reuse it without re-querying.
    """
    model = Mfund

    def get_queryset(self):
        # Sum NAV value per sub-category; drop sub-categories totalling zero.
        self.queryset = Mfund.objects.filter(mf_user_id=self.request.user.id). \
            values('mf_subcat').annotate(scheme_sum=Sum('mf_nav_value')). \
            exclude(scheme_sum=0.0).order_by('-scheme_sum')
        return self.queryset

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        labels_values_dict = {}
        sum_total = 0
        for q_row in self.queryset:
            sum_total += q_row['scheme_sum']
            labels_values_dict[q_row['mf_subcat']] = q_row['scheme_sum']
        context['sum_total'] = int(sum_total)
        # Pie slices sorted by ascending amount, as before.
        # (Leftover debug prints removed.)
        labels = []
        values = []
        for subcat, amount in sorted(labels_values_dict.items(),
                                     key=lambda item: item[1]):
            labels.append(subcat)
            values.append(amount)
        fig = go.Figure(data=[go.Pie(labels=labels, values=values)])
        fig.update_traces(textposition='inside', textinfo='percent+label')
        # fig.show()
        context['plot_div_1'] = plot(fig, output_type='div',
                                     include_plotlyjs=False)
        return context
\\", "0 units if int(float(brec.bim_units)) != 0: _, created = Mfund.objects.update_or_create(", "include_plotlyjs=False) context['plot_div_1'] = plot_div_1 return context class MfundRefreshView(View): debug_level =", "# breakpoint() # import pdb # pdb.set_trace() # Updated Gfundareco", "order_by('mf_amc', 'mf_category', 'mf_subcat', '-mf_nav_value') return queryset def get_context_data(self, **kwargs): context", "print('values ', values) fig = go.Figure(data=[go.Pie(labels=labels, values=values)]) fig.update_traces(textposition='inside', textinfo='percent+label') #", "Mfund.objects.all().filter(mf_user_id=self.request.user.id).order_by('-mf_nav_value') return queryset def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) refresh_url", "Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_amc', 'mf_category', 'mf_subcat',", "= [] labels_values_dict = {} sum_total = 0 for q_row", "', max_mf_id) if max_mf_id is None: max_mf_id = 0 print('max_mf_id", "= Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ values('mf_amc').annotate(scheme_sum=Sum('mf_nav_value')). 
\\ exclude(scheme_sum=0.0).order_by('-scheme_sum') print('hi ', self.queryset) return", "print('labels values dict', labels_values_dict) for k, v in sorted(labels_values_dict.items(), key=lambda", "[filters.OrderingFilter,] # ordering_fields = ['sno', 'nse_symbol'] def get_queryset(self): queryset =", "mf_subcat=brec.bim_subcat, mf_rating=brec.bim_rating, mf_cost_value=brec.bim_cost_value, mf_nav_value=brec.bim_nav_value, mf_research_reco=brec.bim_research_reco ) # breakpoint() # import", "Mfund_url() context[\"refresh_url\"] = refresh_url return context class MfundListView_AMC_Amount(ListView): model =", "= refresh_url return context class MfundListView_Amount(ListView): model = Mfund def", "max_mf_id is None: max_mf_id = 0 print('max_mf_id ', max_mf_id) unique_id", "= [] values = [] labels_values_dict = {} sum_total =", "= Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_category', 'mf_subcat',", "method_decorator from django.views.generic.list import ListView from django.views import View from", "['sno', 'nse_symbol'] def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id) return queryset @method_decorator(login_required)", "labels_values_dict[q_row['mf_amc']] = q_row['scheme_sum'] context['sum_total'] = int(sum_total) print('labels values dict', labels_values_dict)", "max_mf_id = 0 print('max_mf_id ', max_mf_id) unique_id = max_mf_id for", "300 # filter_backends = [filters.OrderingFilter,] # ordering_fields = ['sno', 'nse_symbol']", "labels_values_dict = {} sum_total = 0 for q_row in self.queryset:", "class MfundListView(ListView): model = Mfund # if pagination is desired", "context class MfundListView_Reco(ListView): model = Mfund def get_queryset(self): queryset =", "'-mf_nav_value') return queryset def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) refresh_url", "views here. 
from .models import Mfund import plotly.graph_objects as go", "+= q_row['scheme_sum'] labels_values_dict[q_row['mf_amc']] = q_row['scheme_sum'] context['sum_total'] = int(sum_total) print('labels values", "return context class MfundListView_Subcat(ListView): model = Mfund def get_queryset(self): queryset", "= max_id_instances['max_id'] print('DS: found max id ', max_mf_id) if max_mf_id", "brec.bim_cost_value, brec.bim_nav_value) print(brec.bim_research_reco) # skip 0 units if int(float(brec.bim_units)) !=", "HttpResponseRedirect(reverse(\"mfund-list\")) def __init__(self): super(MfundRefreshView, self).__init__() def mfund_refresh(self, request): debug_level =", "plotly.graph_objects as go from plotly.offline import plot from plotly.tools import", "import Mfund import plotly.graph_objects as go from plotly.offline import plot", "item: item[1]): labels.append(k) values.append(v) print('labels ', labels) print('values ', values)", "v in sorted(labels_values_dict.items(), key=lambda item: item[1]): labels.append(k) values.append(v) print('labels ',", "unique_id = max_mf_id for brec in BrokerIcidirMf.objects.all().filter(bim_user_id=request.user.id): unique_id += 1", "in BrokerIcidirMf.objects.all().filter(bim_user_id=request.user.id): unique_id += 1 print(brec.bim_amc, brec.bim_name, brec.bim_category, brec.bim_subcat) print(brec.bim_rating,", "sum_total += q_row['scheme_sum'] labels_values_dict[q_row['mf_amc']] = q_row['scheme_sum'] context['sum_total'] = int(sum_total) print('labels", "pd import csv, io import openpyxl from django.contrib import messages", "from django.urls import reverse from django.http import HttpResponseRedirect from django_gotolong.lastrefd.models", "= refresh_url return context class MfundListView_Subcat(ListView): model = Mfund def", "print('hi ', self.queryset) return self.queryset def get_context_data(self, **kwargs): context =", "reverse from django.http import HttpResponseRedirect from django_gotolong.lastrefd.models import Lastrefd, 
lastrefd_update", "self.queryset def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) labels = []", "max_mf_id for brec in BrokerIcidirMf.objects.all().filter(bim_user_id=request.user.id): unique_id += 1 print(brec.bim_amc, brec.bim_name,", "MfundListView_Category(ListView): model = Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\", "import redirect from django.contrib.auth.decorators import login_required from django.utils.decorators import method_decorator", "# first delete all existing mfund objects Mfund.objects.all().filter(mf_user_id=request.user.id).delete() max_id_instances =", "= int(sum_total) print('labels values dict', labels_values_dict) for k, v in", "model = Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_amc',", "self.queryset: sum_total += q_row['scheme_sum'] labels_values_dict[q_row['mf_amc']] = q_row['scheme_sum'] context['sum_total'] = int(sum_total)", "Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ values('mf_subcat').annotate(scheme_sum=Sum('mf_nav_value')). 
\\ exclude(scheme_sum=0.0).order_by('-scheme_sum') return self.queryset def get_context_data(self, **kwargs):", "return context class MfundListView_Category(ListView): model = Mfund def get_queryset(self): queryset", "max_id_instances = Mfund.objects.aggregate(max_id=Max('mf_id')) max_mf_id = max_id_instances['max_id'] print('DS: found max id", "django.db.models import OuterRef, Subquery, Count, Sum, Max, Min from django.db.models.functions", "', self.queryset) return self.queryset def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs)", "Mfund_url() context[\"refresh_url\"] = refresh_url return context class MfundListView_Subcat(ListView): model =", "return queryset @method_decorator(login_required) def dispatch(self, *args, **kwargs): return super(MfundListView, self).dispatch(*args,", "def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) labels = [] values", "= Mfund def get_queryset(self): self.queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ values('mf_subcat').annotate(scheme_sum=Sum('mf_nav_value')). \\", "class MfundListView_Subcat(ListView): model = Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id).", "request): self.mfund_refresh(request) return HttpResponseRedirect(reverse(\"mfund-list\")) def __init__(self): super(MfundRefreshView, self).__init__() def mfund_refresh(self,", "Mfund_url(): return \"unused-mfund-refresh-url\" class MfundListView(ListView): model = Mfund # if", "Max, Min from django.db.models.functions import Trim, Lower, Round import pandas", "Mfund_url() context[\"refresh_url\"] = refresh_url return context class MfundListView_Reco(ListView): model =", "+= q_row['scheme_sum'] labels_values_dict[q_row['mf_subcat']] = q_row['scheme_sum'] context['sum_total'] = int(sum_total) print('labels values", "get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). 
\\ order_by('mf_research_reco', '-mf_rating') return queryset def", "q_row in self.queryset: sum_total += q_row['scheme_sum'] labels_values_dict[q_row['mf_subcat']] = q_row['scheme_sum'] context['sum_total']", "def get_queryset(self): self.queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ values('mf_amc').annotate(scheme_sum=Sum('mf_nav_value')). \\ exclude(scheme_sum=0.0).order_by('-scheme_sum') print('hi", "Mfund_url() context[\"refresh_url\"] = refresh_url return context class MfundListView_Amount(ListView): model =", "Mfund def get_queryset(self): self.queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ values('mf_amc').annotate(scheme_sum=Sum('mf_nav_value')). \\ exclude(scheme_sum=0.0).order_by('-scheme_sum')", "from django.http import HttpResponseRedirect from django_gotolong.lastrefd.models import Lastrefd, lastrefd_update from", "for k, v in sorted(labels_values_dict.items(), key=lambda item: item[1]): labels.append(k) values.append(v)", "Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_subcat', '-mf_nav_value') return", "print(brec.bim_research_reco) # skip 0 units if int(float(brec.bim_units)) != 0: _,", "# paginate_by = 300 # filter_backends = [filters.OrderingFilter,] # ordering_fields", "= max_mf_id for brec in BrokerIcidirMf.objects.all().filter(bim_user_id=request.user.id): unique_id += 1 print(brec.bim_amc,", "go from plotly.offline import plot from plotly.tools import make_subplots from", "from django_gotolong.broker.icidir.imf.models import BrokerIcidirMf def Mfund_url(): return \"unused-mfund-refresh-url\" class MfundListView(ListView):", "import settings from django.shortcuts import redirect from django.contrib.auth.decorators import login_required", "= Mfund.objects.all().filter(mf_user_id=self.request.user.id). 
\\ order_by('mf_subcat', '-mf_nav_value') return queryset def get_context_data(self, **kwargs):", "MfundListView_AMC(ListView): model = Mfund def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\", "import plotly.graph_objects as go from plotly.offline import plot from plotly.tools", "plotly.offline import plot from plotly.tools import make_subplots from django.db.models import", "Subquery, Count, Sum, Max, Min from django.db.models.functions import Trim, Lower,", "mf_name=brec.bim_name, mf_category=brec.bim_category, mf_subcat=brec.bim_subcat, mf_rating=brec.bim_rating, mf_cost_value=brec.bim_cost_value, mf_nav_value=brec.bim_nav_value, mf_research_reco=brec.bim_research_reco ) # breakpoint()", "as go from plotly.offline import plot from plotly.tools import make_subplots", "get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ order_by('mf_amc', 'mf_category', 'mf_subcat', '-mf_nav_value') return", "include_plotlyjs=False) context['plot_div_1'] = plot_div_1 return context class MfundListView_Category(ListView): model =", "Mfund.objects.all().filter(mf_user_id=self.request.user.id). \\ values('mf_amc').annotate(scheme_sum=Sum('mf_nav_value')). \\ exclude(scheme_sum=0.0).order_by('-scheme_sum') print('hi ', self.queryset) return self.queryset", "= plot_div_1 return context class MfundRefreshView(View): debug_level = 1 def", "mf_nav_value=brec.bim_nav_value, mf_research_reco=brec.bim_research_reco ) # breakpoint() # import pdb # pdb.set_trace()", "= refresh_url return context class MfundListView_AMC(ListView): model = Mfund def", "import Lastrefd, lastrefd_update from django_gotolong.broker.icidir.imf.models import BrokerIcidirMf def Mfund_url(): return", "'nse_symbol'] def get_queryset(self): queryset = Mfund.objects.all().filter(mf_user_id=self.request.user.id) return queryset @method_decorator(login_required) def" ]
[ "g1.join() else: print( r.status_code) def _download(self, ts_list): self.pool.map(self._worker, ts_list) if", "int((r-l)/2) except : if int((r-l)/2) == 0: for i in", "def getMoreTsList(self,ts_list): headers = {'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS", "= len(ts_list) print(self.ts_total) g1 = gevent.spawn(self._join_file) self._download(ts_list) g1.join() else: print(", "index = 0 outfile = '' while index < self.ts_total:", "l = r = int(tsNum) maxTs = 0 while retry", "ts_tuple): url = ts_tuple[0] index = ts_tuple[1] retry = self.retry", "not n.startswith(\"#\")] if moreTs: ts_list = self.getMoreTsList(ts_list) ts_list = list(zip(ts_list,", "ts_list if __name__ == '__main__': downloader = Downloader(5) downloader.run('https://www.xiaodianying.com/filets/2069/dp.m3u8', './video',True)", "\".ts\") return ts_list maxTs = r r = r -", "= r + 1 r = l + 100 if", "headers = {'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like", "ts_tuple[0] index = ts_tuple[1] retry = self.retry while retry: try:", "while retry or isOk: try: isOk = urllib.request.urlopen(req).status==200 if isOk:", "= '' self.succed = {} self.failed = [] self.ts_total =", "def __init__(self, pool_size, retry=3): self.pool = Pool(pool_size) self.session = self._get_http_session(pool_size,", "in range(len(list(ts_list)))])) if ts_list: self.ts_total = len(ts_list) print(self.ts_total) g1 =", "isOk: retry = 3 l = r + 1 r", "pattern = re.compile(r'(\\d+\\.?\\d)\\.ts') tsNum = '{:0>3}'.format(int(pattern.findall(lastTs)[0]) + 1 ) nextTs", "urllib import os import time import re import ssl class", "'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9', 'upgrade-insecure-requests':1, 'scheme':'https' }", "+ 1 r = l + 100 if maxTs <", "ssl._create_default_https_context = ssl._create_unverified_context ts_list = [urllib.parse.urljoin(m3u8_url, n.strip()) for n in", "1 else: 
time.sleep(1) if outfile: outfile.close() def getMoreTsList(self,ts_list): headers =", "= r r = r - int((r-l)/2) nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1)", "if outfile: outfile.close() def getMoreTsList(self,ts_list): headers = {'user-agent': 'Mozilla/5.0 (iPhone;", "isOk = False lastTs = ts_list[-1] pattern = re.compile(r'(\\d+\\.?\\d)\\.ts') tsNum", "= [] self.ts_total = 0 def _get_http_session(self, pool_connections, pool_maxsize, max_retries):", "retry or isOk: try: isOk = urllib.request.urlopen(req).status==200 if isOk: retry", "from gevent.pool import Pool import gevent import requests import urllib", "url.split('/')[-1].split('?')[0] print( file_name) with open(os.path.join(self.dir, file_name), 'wb') as f: f.write(r.content)", "100 if maxTs < r else maxTs - int((maxTs-l)/2) nextTs", "int((maxTs-l)/2) nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') else:", "r.ok: body = r.content if body: ssl._create_default_https_context = ssl._create_unverified_context ts_list", "l + 100 if maxTs < r else maxTs -", "= r - int((r-l)/2) except : if int((r-l)/2) == 0:", "utf-8 from gevent import monkey monkey.patch_all() from gevent.pool import Pool", "retry: try: r = self.session.get(url, timeout=20) if r.ok: file_name =", "range(int(tsNum) , r): ts_list.append(re.sub(pattern,'{:0>3}'.format(i),lastTs,1) + \".ts\") return ts_list maxTs =", "ts_list): self.pool.map(self._worker, ts_list) if self.failed: ts_list = self.failed self.failed =", "1 r = l + 100 if maxTs < r", "ts_list = [urllib.parse.urljoin(m3u8_url, n.strip()) for n in str(body, encoding =", "= '' while index < self.ts_total: file_name = self.succed.get(index, '')", "try: r = self.session.get(url, timeout=20) if r.ok: file_name = url.split('/')[-1].split('?')[0]", "re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') retry -= 1 isOk", "len(ts_list) 
print(self.ts_total) g1 = gevent.spawn(self._join_file) self._download(ts_list) g1.join() else: print( r.status_code)", "n and not n.startswith(\"#\")] if moreTs: ts_list = self.getMoreTsList(ts_list) ts_list", "int(tsNum) maxTs = 0 while retry or isOk: try: isOk", ") nextTs = re.sub(pattern,str(tsNum),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') l", "_download(self, ts_list): self.pool.map(self._worker, ts_list) if self.failed: ts_list = self.failed self.failed", "OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko)", "'scheme':'https' } retry = self.retry isOk = False lastTs =", "maxTs - int((maxTs-l)/2) nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req =", "\".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') retry -= 1 isOk = False", "urllib.request.Request(url=nextTs,headers=headers,method='GET') l = r = int(tsNum) maxTs = 0 while", "ts_list) if self.failed: ts_list = self.failed self.failed = [] self._download(ts_list)", "AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A372 Safari/604.1', 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8', 'accept-encoding':", "session.mount('http://', adapter) session.mount('https://', adapter) return session def run(self, m3u8_url, dir='',moreTs=False):", ", r): ts_list.append(re.sub(pattern,'{:0>3}'.format(i),lastTs,1) + \".ts\") return ts_list maxTs = r", "self.failed.append((url, index)) def _join_file(self): index = 0 outfile = ''", "False return ts_list if __name__ == '__main__': downloader = Downloader(5)", "url = ts_tuple[0] index = ts_tuple[1] retry = self.retry while", "r - int((r-l)/2) except : if int((r-l)/2) == 0: for", "g1 = gevent.spawn(self._join_file) self._download(ts_list) g1.join() else: print( r.status_code) def _download(self,", "if n and not n.startswith(\"#\")] if moreTs: ts_list = self.getMoreTsList(ts_list)", "n.strip()) for n in str(body, 
encoding = \"utf8\").split('\\n') if n", "class Downloader: def __init__(self, pool_size, retry=3): self.pool = Pool(pool_size) self.session", "= re.compile(r'(\\d+\\.?\\d)\\.ts') tsNum = '{:0>3}'.format(int(pattern.findall(lastTs)[0]) + 1 ) nextTs =", "__init__(self, pool_size, retry=3): self.pool = Pool(pool_size) self.session = self._get_http_session(pool_size, pool_size,", "self.dir = '' self.succed = {} self.failed = [] self.ts_total", "range(len(list(ts_list)))])) if ts_list: self.ts_total = len(ts_list) print(self.ts_total) g1 = gevent.spawn(self._join_file)", "gevent import requests import urllib import os import time import", "try: isOk = urllib.request.urlopen(req).status==200 if isOk: retry = 3 l", "= open(os.path.join(self.dir, file_name.split('.')[0]+'_all.'+file_name.split('.')[-1]), 'wb') outfile.write(infile.read()) infile.close() os.remove(os.path.join(self.dir, file_name)) index +=", "if file_name: infile = open(os.path.join(self.dir, file_name), 'rb') if not outfile:", "= retry self.dir = '' self.succed = {} self.failed =", "11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0", "- int((maxTs-l)/2) nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET')", "+ \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') l = r = int(tsNum)", "[] self._download(ts_list) def _worker(self, ts_tuple): url = ts_tuple[0] index =", "r = int(tsNum) maxTs = 0 while retry or isOk:", "max_retries): session = requests.Session() adapter = requests.adapters.HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_maxsize, max_retries=max_retries) session.mount('http://',", "pool_maxsize=pool_maxsize, max_retries=max_retries) session.mount('http://', adapter) session.mount('https://', adapter) return session def run(self,", "pool_size, retry=3): self.pool = Pool(pool_size) self.session = self._get_http_session(pool_size, pool_size, retry)", "= 
url.split('/')[-1].split('?')[0] print( file_name) with open(os.path.join(self.dir, file_name), 'wb') as f:", "adapter = requests.adapters.HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_maxsize, max_retries=max_retries) session.mount('http://', adapter) session.mount('https://', adapter) return", "br', 'accept-language': 'zh-CN,zh;q=0.9', 'upgrade-insecure-requests':1, 'scheme':'https' } retry = self.retry isOk", "r = r - int((r-l)/2) except : if int((r-l)/2) ==", "OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A372 Safari/604.1', 'accept':", "outfile = '' while index < self.ts_total: file_name = self.succed.get(index,", "= ts_list[-1] pattern = re.compile(r'(\\d+\\.?\\d)\\.ts') tsNum = '{:0>3}'.format(int(pattern.findall(lastTs)[0]) + 1", "{'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS", "= self.session.get(m3u8_url, timeout=10) if r.ok: body = r.content if body:", "r): ts_list.append(re.sub(pattern,'{:0>3}'.format(i),lastTs,1) + \".ts\") return ts_list maxTs = r r", "retry = self.retry while retry: try: r = self.session.get(url, timeout=20)", "(iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38", "req = urllib.request.Request(url=nextTs,headers=headers,method='GET') retry -= 1 isOk = False return", "= ts_tuple[1] retry = self.retry while retry: try: r =", "-= 1 isOk = False return ts_list if __name__ ==", "import monkey monkey.patch_all() from gevent.pool import Pool import gevent import", "Version/11.0 Mobile/15A372 Safari/604.1', 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8', 'accept-encoding': 'gzip, deflate, br', 'accept-language':", "- int((r-l)/2) nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET')", "= [] self._download(ts_list) def _worker(self, ts_tuple): url = ts_tuple[0] index", "0 while retry or isOk: try: isOk = 
urllib.request.urlopen(req).status==200 if", "r else maxTs - int((maxTs-l)/2) nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\"", "print( file_name) with open(os.path.join(self.dir, file_name), 'wb') as f: f.write(r.content) self.succed[index]", "and not n.startswith(\"#\")] if moreTs: ts_list = self.getMoreTsList(ts_list) ts_list =", "'wb') outfile.write(infile.read()) infile.close() os.remove(os.path.join(self.dir, file_name)) index += 1 else: time.sleep(1)", "False lastTs = ts_list[-1] pattern = re.compile(r'(\\d+\\.?\\d)\\.ts') tsNum = '{:0>3}'.format(int(pattern.findall(lastTs)[0])", "def run(self, m3u8_url, dir='',moreTs=False): self.dir = dir if self.dir and", "_join_file(self): index = 0 outfile = '' while index <", "import Pool import gevent import requests import urllib import os", "file_name) with open(os.path.join(self.dir, file_name), 'wb') as f: f.write(r.content) self.succed[index] =", "+ 100 if maxTs < r else maxTs - int((maxTs-l)/2)", "= False return ts_list if __name__ == '__main__': downloader =", "[urllib.parse.urljoin(m3u8_url, n.strip()) for n in str(body, encoding = \"utf8\").split('\\n') if", "r.status_code) def _download(self, ts_list): self.pool.map(self._worker, ts_list) if self.failed: ts_list =", "r - int((r-l)/2) nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req =", "requests.adapters.HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_maxsize, max_retries=max_retries) session.mount('http://', adapter) session.mount('https://', adapter) return session def", "= int(tsNum) maxTs = 0 while retry or isOk: try:", "ts_list: self.ts_total = len(ts_list) print(self.ts_total) g1 = gevent.spawn(self._join_file) self._download(ts_list) g1.join()", "self._download(ts_list) g1.join() else: print( r.status_code) def _download(self, ts_list): self.pool.map(self._worker, ts_list)", "\".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') else: r = r - int((r-l)/2)", "int((r-l)/2) nextTs 
= re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') retry", "adapter) return session def run(self, m3u8_url, dir='',moreTs=False): self.dir = dir", "'rb') if not outfile: outfile = open(os.path.join(self.dir, file_name.split('.')[0]+'_all.'+file_name.split('.')[-1]), 'wb') outfile.write(infile.read())", "retry) self.retry = retry self.dir = '' self.succed = {}", "= r - int((r-l)/2) nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req", "= {'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac", "str(body, encoding = \"utf8\").split('\\n') if n and not n.startswith(\"#\")] if", "retry self.dir = '' self.succed = {} self.failed = []", "return ts_list maxTs = r r = r - int((r-l)/2)", "'') if file_name: infile = open(os.path.join(self.dir, file_name), 'rb') if not", "= self.succed.get(index, '') if file_name: infile = open(os.path.join(self.dir, file_name), 'rb')", ": if int((r-l)/2) == 0: for i in range(int(tsNum) ,", "req = urllib.request.Request(url=nextTs,headers=headers,method='GET') l = r = int(tsNum) maxTs =", "r = self.session.get(url, timeout=20) if r.ok: file_name = url.split('/')[-1].split('?')[0] print(", "self.dir and not os.path.isdir(self.dir): os.makedirs(self.dir) r = self.session.get(m3u8_url, timeout=10) if", "ts_list = self.getMoreTsList(ts_list) ts_list = list(zip(ts_list, [n for n in", "not os.path.isdir(self.dir): os.makedirs(self.dir) r = self.session.get(m3u8_url, timeout=10) if r.ok: body", "dir='',moreTs=False): self.dir = dir if self.dir and not os.path.isdir(self.dir): os.makedirs(self.dir)", "pool_size, retry) self.retry = retry self.dir = '' self.succed =", "file_name.split('.')[0]+'_all.'+file_name.split('.')[-1]), 'wb') outfile.write(infile.read()) infile.close() os.remove(os.path.join(self.dir, file_name)) index += 1 else:", "gevent.spawn(self._join_file) self._download(ts_list) g1.join() else: print( r.status_code) def _download(self, 
ts_list): self.pool.map(self._worker,", "deflate, br', 'accept-language': 'zh-CN,zh;q=0.9', 'upgrade-insecure-requests':1, 'scheme':'https' } retry = self.retry", "self.pool.map(self._worker, ts_list) if self.failed: ts_list = self.failed self.failed = []", "= open(os.path.join(self.dir, file_name), 'rb') if not outfile: outfile = open(os.path.join(self.dir,", "as f: f.write(r.content) self.succed[index] = file_name return except: retry -=", "def _join_file(self): index = 0 outfile = '' while index", "self.ts_total = len(ts_list) print(self.ts_total) g1 = gevent.spawn(self._join_file) self._download(ts_list) g1.join() else:", "if self.failed: ts_list = self.failed self.failed = [] self._download(ts_list) def", "< r else maxTs - int((maxTs-l)/2) nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1) +", "l = r + 1 r = l + 100", "= ssl._create_unverified_context ts_list = [urllib.parse.urljoin(m3u8_url, n.strip()) for n in str(body,", "re.sub(pattern,str(tsNum),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') l = r =", "self.succed = {} self.failed = [] self.ts_total = 0 def", "os.remove(os.path.join(self.dir, file_name)) index += 1 else: time.sleep(1) if outfile: outfile.close()", "open(os.path.join(self.dir, file_name), 'wb') as f: f.write(r.content) self.succed[index] = file_name return", "(KHTML, like Gecko) Version/11.0 Mobile/15A372 Safari/604.1', 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8', 'accept-encoding': 'gzip,", "body: ssl._create_default_https_context = ssl._create_unverified_context ts_list = [urllib.parse.urljoin(m3u8_url, n.strip()) for n", "r = self.session.get(m3u8_url, timeout=10) if r.ok: body = r.content if", "if ts_list: self.ts_total = len(ts_list) print(self.ts_total) g1 = gevent.spawn(self._join_file) self._download(ts_list)", "self.retry = retry self.dir = '' self.succed = {} self.failed", "1 isOk = False return ts_list if __name__ == '__main__':", 
"isOk: try: isOk = urllib.request.urlopen(req).status==200 if isOk: retry = 3", "import time import re import ssl class Downloader: def __init__(self,", "self.ts_total: file_name = self.succed.get(index, '') if file_name: infile = open(os.path.join(self.dir,", "retry -= 1 isOk = False return ts_list if __name__", "= re.sub(pattern,str(tsNum),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') l = r", "infile.close() os.remove(os.path.join(self.dir, file_name)) index += 1 else: time.sleep(1) if outfile:", "self._download(ts_list) def _worker(self, ts_tuple): url = ts_tuple[0] index = ts_tuple[1]", "if r.ok: body = r.content if body: ssl._create_default_https_context = ssl._create_unverified_context", "with open(os.path.join(self.dir, file_name), 'wb') as f: f.write(r.content) self.succed[index] = file_name", "session def run(self, m3u8_url, dir='',moreTs=False): self.dir = dir if self.dir", "import os import time import re import ssl class Downloader:", "= Pool(pool_size) self.session = self._get_http_session(pool_size, pool_size, retry) self.retry = retry", "Gecko) Version/11.0 Mobile/15A372 Safari/604.1', 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8', 'accept-encoding': 'gzip, deflate, br',", "file_name)) index += 1 else: time.sleep(1) if outfile: outfile.close() def", "else maxTs - int((maxTs-l)/2) nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req", "run(self, m3u8_url, dir='',moreTs=False): self.dir = dir if self.dir and not", "'' while index < self.ts_total: file_name = self.succed.get(index, '') if", "urllib.request.urlopen(req).status==200 if isOk: retry = 3 l = r +", "{} self.failed = [] self.ts_total = 0 def _get_http_session(self, pool_connections,", "if r.ok: file_name = url.split('/')[-1].split('?')[0] print( file_name) with open(os.path.join(self.dir, file_name),", "import ssl class Downloader: def __init__(self, pool_size, retry=3): self.pool =", 
"= self.session.get(url, timeout=20) if r.ok: file_name = url.split('/')[-1].split('?')[0] print( file_name)", "isOk = urllib.request.urlopen(req).status==200 if isOk: retry = 3 l =", "= file_name return except: retry -= 1 print ('[FAIL]%s' %", "Pool(pool_size) self.session = self._get_http_session(pool_size, pool_size, retry) self.retry = retry self.dir", "nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') else: r", "self.pool = Pool(pool_size) self.session = self._get_http_session(pool_size, pool_size, retry) self.retry =", "= 0 def _get_http_session(self, pool_connections, pool_maxsize, max_retries): session = requests.Session()", "-= 1 print ('[FAIL]%s' % url) self.failed.append((url, index)) def _join_file(self):", "retry=3): self.pool = Pool(pool_size) self.session = self._get_http_session(pool_size, pool_size, retry) self.retry", "= r = int(tsNum) maxTs = 0 while retry or", "} retry = self.retry isOk = False lastTs = ts_list[-1]", "= self.getMoreTsList(ts_list) ts_list = list(zip(ts_list, [n for n in range(len(list(ts_list)))]))", "index < self.ts_total: file_name = self.succed.get(index, '') if file_name: infile", "os.makedirs(self.dir) r = self.session.get(m3u8_url, timeout=10) if r.ok: body = r.content", "< self.ts_total: file_name = self.succed.get(index, '') if file_name: infile =", "file_name return except: retry -= 1 print ('[FAIL]%s' % url)", "os.path.isdir(self.dir): os.makedirs(self.dir) r = self.session.get(m3u8_url, timeout=10) if r.ok: body =", "\".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') l = r = int(tsNum) maxTs", "nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') retry -=", "== 0: for i in range(int(tsNum) , r): ts_list.append(re.sub(pattern,'{:0>3}'.format(i),lastTs,1) +", "+ \".ts\" req = 
urllib.request.Request(url=nextTs,headers=headers,method='GET') retry -= 1 isOk =", "index += 1 else: time.sleep(1) if outfile: outfile.close() def getMoreTsList(self,ts_list):", "isOk = False return ts_list if __name__ == '__main__': downloader", "print(self.ts_total) g1 = gevent.spawn(self._join_file) self._download(ts_list) g1.join() else: print( r.status_code) def", "self._get_http_session(pool_size, pool_size, retry) self.retry = retry self.dir = '' self.succed", "= [urllib.parse.urljoin(m3u8_url, n.strip()) for n in str(body, encoding = \"utf8\").split('\\n')", "ts_tuple[1] retry = self.retry while retry: try: r = self.session.get(url,", "ssl._create_unverified_context ts_list = [urllib.parse.urljoin(m3u8_url, n.strip()) for n in str(body, encoding", "encoding = \"utf8\").split('\\n') if n and not n.startswith(\"#\")] if moreTs:", "= self.retry while retry: try: r = self.session.get(url, timeout=20) if", "n in range(len(list(ts_list)))])) if ts_list: self.ts_total = len(ts_list) print(self.ts_total) g1", "1 ) nextTs = re.sub(pattern,str(tsNum),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET')", "Mobile/15A372 Safari/604.1', 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9',", "tsNum = '{:0>3}'.format(int(pattern.findall(lastTs)[0]) + 1 ) nextTs = re.sub(pattern,str(tsNum),lastTs,1) +", "ts_list = self.failed self.failed = [] self._download(ts_list) def _worker(self, ts_tuple):", "= 3 l = r + 1 r = l", "open(os.path.join(self.dir, file_name.split('.')[0]+'_all.'+file_name.split('.')[-1]), 'wb') outfile.write(infile.read()) infile.close() os.remove(os.path.join(self.dir, file_name)) index += 1", "i in range(int(tsNum) , r): ts_list.append(re.sub(pattern,'{:0>3}'.format(i),lastTs,1) + \".ts\") return ts_list", "if maxTs < r else maxTs - int((maxTs-l)/2) nextTs =", "print( r.status_code) def 
_download(self, ts_list): self.pool.map(self._worker, ts_list) if self.failed: ts_list", "X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A372 Safari/604.1', 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',", "[] self.ts_total = 0 def _get_http_session(self, pool_connections, pool_maxsize, max_retries): session", "maxTs < r else maxTs - int((maxTs-l)/2) nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1)", "not outfile: outfile = open(os.path.join(self.dir, file_name.split('.')[0]+'_all.'+file_name.split('.')[-1]), 'wb') outfile.write(infile.read()) infile.close() os.remove(os.path.join(self.dir,", "'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9', 'upgrade-insecure-requests':1, 'scheme':'https' } retry =", "'zh-CN,zh;q=0.9', 'upgrade-insecure-requests':1, 'scheme':'https' } retry = self.retry isOk = False", "m3u8_url, dir='',moreTs=False): self.dir = dir if self.dir and not os.path.isdir(self.dir):", "+= 1 else: time.sleep(1) if outfile: outfile.close() def getMoreTsList(self,ts_list): headers", "= gevent.spawn(self._join_file) self._download(ts_list) g1.join() else: print( r.status_code) def _download(self, ts_list):", "index = ts_tuple[1] retry = self.retry while retry: try: r", "% url) self.failed.append((url, index)) def _join_file(self): index = 0 outfile", "except: retry -= 1 print ('[FAIL]%s' % url) self.failed.append((url, index))", "= re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') else: r =", "retry = self.retry isOk = False lastTs = ts_list[-1] pattern", "= urllib.request.Request(url=nextTs,headers=headers,method='GET') retry -= 1 isOk = False return ts_list", "pool_maxsize, max_retries): session = requests.Session() adapter = requests.adapters.HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_maxsize, max_retries=max_retries)", "r = l + 100 if maxTs < r else", "os import time import re 
import ssl class Downloader: def", "= re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') retry -= 1", "self.retry while retry: try: r = self.session.get(url, timeout=20) if r.ok:", "and not os.path.isdir(self.dir): os.makedirs(self.dir) r = self.session.get(m3u8_url, timeout=10) if r.ok:", "'{:0>3}'.format(int(pattern.findall(lastTs)[0]) + 1 ) nextTs = re.sub(pattern,str(tsNum),lastTs,1) + \".ts\" req", "return except: retry -= 1 print ('[FAIL]%s' % url) self.failed.append((url,", "re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') else: r = r", "url) self.failed.append((url, index)) def _join_file(self): index = 0 outfile =", "monkey.patch_all() from gevent.pool import Pool import gevent import requests import", "if not outfile: outfile = open(os.path.join(self.dir, file_name.split('.')[0]+'_all.'+file_name.split('.')[-1]), 'wb') outfile.write(infile.read()) infile.close()", "Downloader: def __init__(self, pool_size, retry=3): self.pool = Pool(pool_size) self.session =", "0 outfile = '' while index < self.ts_total: file_name =", "= \"utf8\").split('\\n') if n and not n.startswith(\"#\")] if moreTs: ts_list", "else: print( r.status_code) def _download(self, ts_list): self.pool.map(self._worker, ts_list) if self.failed:", "r + 1 r = l + 100 if maxTs", "self.failed = [] self.ts_total = 0 def _get_http_session(self, pool_connections, pool_maxsize,", "import requests import urllib import os import time import re", "'wb') as f: f.write(r.content) self.succed[index] = file_name return except: retry", "= r.content if body: ssl._create_default_https_context = ssl._create_unverified_context ts_list = [urllib.parse.urljoin(m3u8_url,", "[n for n in range(len(list(ts_list)))])) if ts_list: self.ts_total = len(ts_list)", "ts_list[-1] pattern = re.compile(r'(\\d+\\.?\\d)\\.ts') tsNum = '{:0>3}'.format(int(pattern.findall(lastTs)[0]) + 
1 )", "r = r - int((r-l)/2) nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1) + \".ts\"", "'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9', 'upgrade-insecure-requests':1, 'scheme':'https' } retry", "requests import urllib import os import time import re import", "'upgrade-insecure-requests':1, 'scheme':'https' } retry = self.retry isOk = False lastTs", "#coding: utf-8 from gevent import monkey monkey.patch_all() from gevent.pool import", "lastTs = ts_list[-1] pattern = re.compile(r'(\\d+\\.?\\d)\\.ts') tsNum = '{:0>3}'.format(int(pattern.findall(lastTs)[0]) +", "= urllib.request.Request(url=nextTs,headers=headers,method='GET') else: r = r - int((r-l)/2) except :", "monkey monkey.patch_all() from gevent.pool import Pool import gevent import requests", "re.compile(r'(\\d+\\.?\\d)\\.ts') tsNum = '{:0>3}'.format(int(pattern.findall(lastTs)[0]) + 1 ) nextTs = re.sub(pattern,str(tsNum),lastTs,1)", "r r = r - int((r-l)/2) nextTs = re.sub(pattern,'{:0>3}'.format(r),lastTs,1) +", "ts_list = list(zip(ts_list, [n for n in range(len(list(ts_list)))])) if ts_list:", "in str(body, encoding = \"utf8\").split('\\n') if n and not n.startswith(\"#\")]", "timeout=20) if r.ok: file_name = url.split('/')[-1].split('?')[0] print( file_name) with open(os.path.join(self.dir,", "file_name: infile = open(os.path.join(self.dir, file_name), 'rb') if not outfile: outfile", "or isOk: try: isOk = urllib.request.urlopen(req).status==200 if isOk: retry =", "open(os.path.join(self.dir, file_name), 'rb') if not outfile: outfile = open(os.path.join(self.dir, file_name.split('.')[0]+'_all.'+file_name.split('.')[-1]),", "= dir if self.dir and not os.path.isdir(self.dir): os.makedirs(self.dir) r =", "outfile.write(infile.read()) infile.close() os.remove(os.path.join(self.dir, file_name)) index += 1 else: time.sleep(1) if", "outfile = open(os.path.join(self.dir, file_name.split('.')[0]+'_all.'+file_name.split('.')[-1]), 'wb') outfile.write(infile.read()) infile.close() 
os.remove(os.path.join(self.dir, file_name)) index", "self.failed: ts_list = self.failed self.failed = [] self._download(ts_list) def _worker(self,", "for n in str(body, encoding = \"utf8\").split('\\n') if n and", "ts_list maxTs = r r = r - int((r-l)/2) nextTs", "= list(zip(ts_list, [n for n in range(len(list(ts_list)))])) if ts_list: self.ts_total", "return session def run(self, m3u8_url, dir='',moreTs=False): self.dir = dir if", "= 0 outfile = '' while index < self.ts_total: file_name", "except : if int((r-l)/2) == 0: for i in range(int(tsNum)", "int((r-l)/2) == 0: for i in range(int(tsNum) , r): ts_list.append(re.sub(pattern,'{:0>3}'.format(i),lastTs,1)", "timeout=10) if r.ok: body = r.content if body: ssl._create_default_https_context =", "max_retries=max_retries) session.mount('http://', adapter) session.mount('https://', adapter) return session def run(self, m3u8_url,", "Pool import gevent import requests import urllib import os import", "req = urllib.request.Request(url=nextTs,headers=headers,method='GET') else: r = r - int((r-l)/2) except", "outfile.close() def getMoreTsList(self,ts_list): headers = {'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone", "if isOk: retry = 3 l = r + 1", "in range(int(tsNum) , r): ts_list.append(re.sub(pattern,'{:0>3}'.format(i),lastTs,1) + \".ts\") return ts_list maxTs", "for i in range(int(tsNum) , r): ts_list.append(re.sub(pattern,'{:0>3}'.format(i),lastTs,1) + \".ts\") return", "pool_connections, pool_maxsize, max_retries): session = requests.Session() adapter = requests.adapters.HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_maxsize,", "def _worker(self, ts_tuple): url = ts_tuple[0] index = ts_tuple[1] retry", "self.session = self._get_http_session(pool_size, pool_size, retry) self.retry = retry self.dir =", "= l + 100 if maxTs < r else maxTs", "return ts_list if __name__ == '__main__': downloader = Downloader(5) downloader.run('https://www.xiaodianying.com/filets/2069/dp.m3u8',", "self.succed.get(index, '') 
if file_name: infile = open(os.path.join(self.dir, file_name), 'rb') if", "self.ts_total = 0 def _get_http_session(self, pool_connections, pool_maxsize, max_retries): session =", "else: time.sleep(1) if outfile: outfile.close() def getMoreTsList(self,ts_list): headers = {'user-agent':", "= ts_tuple[0] index = ts_tuple[1] retry = self.retry while retry:", "adapter) session.mount('https://', adapter) return session def run(self, m3u8_url, dir='',moreTs=False): self.dir", "else: r = r - int((r-l)/2) except : if int((r-l)/2)", "from gevent import monkey monkey.patch_all() from gevent.pool import Pool import", "= self.retry isOk = False lastTs = ts_list[-1] pattern =", "n in str(body, encoding = \"utf8\").split('\\n') if n and not", "while index < self.ts_total: file_name = self.succed.get(index, '') if file_name:", "0 def _get_http_session(self, pool_connections, pool_maxsize, max_retries): session = requests.Session() adapter", "'' self.succed = {} self.failed = [] self.ts_total = 0", "body = r.content if body: ssl._create_default_https_context = ssl._create_unverified_context ts_list =", "session.mount('https://', adapter) return session def run(self, m3u8_url, dir='',moreTs=False): self.dir =", "= self.failed self.failed = [] self._download(ts_list) def _worker(self, ts_tuple): url", "list(zip(ts_list, [n for n in range(len(list(ts_list)))])) if ts_list: self.ts_total =", "f.write(r.content) self.succed[index] = file_name return except: retry -= 1 print", "index)) def _join_file(self): index = 0 outfile = '' while", "def _get_http_session(self, pool_connections, pool_maxsize, max_retries): session = requests.Session() adapter =", "retry = 3 l = r + 1 r =", "import gevent import requests import urllib import os import time", "f: f.write(r.content) self.succed[index] = file_name return except: retry -= 1", "= urllib.request.urlopen(req).status==200 if isOk: retry = 3 l = r", "= requests.adapters.HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_maxsize, 
max_retries=max_retries) session.mount('http://', adapter) session.mount('https://', adapter) return session", "nextTs = re.sub(pattern,str(tsNum),lastTs,1) + \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') l =", "def _download(self, ts_list): self.pool.map(self._worker, ts_list) if self.failed: ts_list = self.failed", "self.retry isOk = False lastTs = ts_list[-1] pattern = re.compile(r'(\\d+\\.?\\d)\\.ts')", "requests.Session() adapter = requests.adapters.HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_maxsize, max_retries=max_retries) session.mount('http://', adapter) session.mount('https://', adapter)", "getMoreTsList(self,ts_list): headers = {'user-agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 11_0", "self.dir = dir if self.dir and not os.path.isdir(self.dir): os.makedirs(self.dir) r", "urllib.request.Request(url=nextTs,headers=headers,method='GET') retry -= 1 isOk = False return ts_list if", "file_name), 'wb') as f: f.write(r.content) self.succed[index] = file_name return except:", "file_name), 'rb') if not outfile: outfile = open(os.path.join(self.dir, file_name.split('.')[0]+'_all.'+file_name.split('.')[-1]), 'wb')", "Safari/604.1', 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9', 'upgrade-insecure-requests':1,", "retry -= 1 print ('[FAIL]%s' % url) self.failed.append((url, index)) def", "0: for i in range(int(tsNum) , r): ts_list.append(re.sub(pattern,'{:0>3}'.format(i),lastTs,1) + \".ts\")", "outfile: outfile = open(os.path.join(self.dir, file_name.split('.')[0]+'_all.'+file_name.split('.')[-1]), 'wb') outfile.write(infile.read()) infile.close() os.remove(os.path.join(self.dir, file_name))", "CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML,", "= urllib.request.Request(url=nextTs,headers=headers,method='GET') l = r = int(tsNum) maxTs = 0", "= requests.Session() adapter = 
requests.adapters.HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_maxsize, max_retries=max_retries) session.mount('http://', adapter) session.mount('https://',", "outfile: outfile.close() def getMoreTsList(self,ts_list): headers = {'user-agent': 'Mozilla/5.0 (iPhone; CPU", "for n in range(len(list(ts_list)))])) if ts_list: self.ts_total = len(ts_list) print(self.ts_total)", "print ('[FAIL]%s' % url) self.failed.append((url, index)) def _join_file(self): index =", "self.session.get(url, timeout=20) if r.ok: file_name = url.split('/')[-1].split('?')[0] print( file_name) with", "dir if self.dir and not os.path.isdir(self.dir): os.makedirs(self.dir) r = self.session.get(m3u8_url,", "self.session.get(m3u8_url, timeout=10) if r.ok: body = r.content if body: ssl._create_default_https_context", "if int((r-l)/2) == 0: for i in range(int(tsNum) , r):", "ssl class Downloader: def __init__(self, pool_size, retry=3): self.pool = Pool(pool_size)", "if self.dir and not os.path.isdir(self.dir): os.makedirs(self.dir) r = self.session.get(m3u8_url, timeout=10)", "n.startswith(\"#\")] if moreTs: ts_list = self.getMoreTsList(ts_list) ts_list = list(zip(ts_list, [n", "'Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X)", "\"utf8\").split('\\n') if n and not n.startswith(\"#\")] if moreTs: ts_list =", "- int((r-l)/2) except : if int((r-l)/2) == 0: for i", "3 l = r + 1 r = l +", "maxTs = 0 while retry or isOk: try: isOk =", "_get_http_session(self, pool_connections, pool_maxsize, max_retries): session = requests.Session() adapter = requests.adapters.HTTPAdapter(pool_connections=pool_connections,", "gevent import monkey monkey.patch_all() from gevent.pool import Pool import gevent", "iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like", "= False lastTs = ts_list[-1] pattern = re.compile(r'(\\d+\\.?\\d)\\.ts') tsNum =", "+ \".ts\" req = urllib.request.Request(url=nextTs,headers=headers,method='GET') else: r = r -", 
"ts_list.append(re.sub(pattern,'{:0>3}'.format(i),lastTs,1) + \".ts\") return ts_list maxTs = r r =", "_worker(self, ts_tuple): url = ts_tuple[0] index = ts_tuple[1] retry =", "file_name = self.succed.get(index, '') if file_name: infile = open(os.path.join(self.dir, file_name),", "r.content if body: ssl._create_default_https_context = ssl._create_unverified_context ts_list = [urllib.parse.urljoin(m3u8_url, n.strip())", "import re import ssl class Downloader: def __init__(self, pool_size, retry=3):", "self.failed self.failed = [] self._download(ts_list) def _worker(self, ts_tuple): url =", "gevent.pool import Pool import gevent import requests import urllib import", "r.ok: file_name = url.split('/')[-1].split('?')[0] print( file_name) with open(os.path.join(self.dir, file_name), 'wb')", "like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A372", "self.failed = [] self._download(ts_list) def _worker(self, ts_tuple): url = ts_tuple[0]", "'accept-language': 'zh-CN,zh;q=0.9', 'upgrade-insecure-requests':1, 'scheme':'https' } retry = self.retry isOk =", "1 print ('[FAIL]%s' % url) self.failed.append((url, index)) def _join_file(self): index", "'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8', 'accept-encoding': 'gzip, deflate, br', 'accept-language': 'zh-CN,zh;q=0.9', 'upgrade-insecure-requests':1, 'scheme':'https'", "= {} self.failed = [] self.ts_total = 0 def _get_http_session(self,", "urllib.request.Request(url=nextTs,headers=headers,method='GET') else: r = r - int((r-l)/2) except : if", "+ 1 ) nextTs = re.sub(pattern,str(tsNum),lastTs,1) + \".ts\" req =", "+ \".ts\") return ts_list maxTs = r r = r", "= self._get_http_session(pool_size, pool_size, retry) self.retry = retry self.dir = ''", "file_name = url.split('/')[-1].split('?')[0] print( file_name) with open(os.path.join(self.dir, file_name), 'wb') as", "session = requests.Session() adapter = 
requests.adapters.HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_maxsize, max_retries=max_retries) session.mount('http://', adapter)", "maxTs = r r = r - int((r-l)/2) nextTs =", "time import re import ssl class Downloader: def __init__(self, pool_size,", "self.succed[index] = file_name return except: retry -= 1 print ('[FAIL]%s'", "re import ssl class Downloader: def __init__(self, pool_size, retry=3): self.pool", "infile = open(os.path.join(self.dir, file_name), 'rb') if not outfile: outfile =", "('[FAIL]%s' % url) self.failed.append((url, index)) def _join_file(self): index = 0", "= 0 while retry or isOk: try: isOk = urllib.request.urlopen(req).status==200", "Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A372 Safari/604.1',", "while retry: try: r = self.session.get(url, timeout=20) if r.ok: file_name", "time.sleep(1) if outfile: outfile.close() def getMoreTsList(self,ts_list): headers = {'user-agent': 'Mozilla/5.0", "like Gecko) Version/11.0 Mobile/15A372 Safari/604.1', 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8', 'accept-encoding': 'gzip, deflate,", "if moreTs: ts_list = self.getMoreTsList(ts_list) ts_list = list(zip(ts_list, [n for", "import urllib import os import time import re import ssl", "moreTs: ts_list = self.getMoreTsList(ts_list) ts_list = list(zip(ts_list, [n for n", "self.getMoreTsList(ts_list) ts_list = list(zip(ts_list, [n for n in range(len(list(ts_list)))])) if", "if body: ssl._create_default_https_context = ssl._create_unverified_context ts_list = [urllib.parse.urljoin(m3u8_url, n.strip()) for", "= '{:0>3}'.format(int(pattern.findall(lastTs)[0]) + 1 ) nextTs = re.sub(pattern,str(tsNum),lastTs,1) + \".ts\"" ]
[ "Django 3.2.9 on 2021-12-06 10:02 from django.db import migrations, models", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('restaurants',", "= [ migrations.AddField( model_name='restaurant', name='description', field=models.CharField(default='Description', max_length=255, verbose_name='Description'), preserve_default=False, ),", "by Django 3.2.9 on 2021-12-06 10:02 from django.db import migrations,", "[ migrations.AddField( model_name='restaurant', name='description', field=models.CharField(default='Description', max_length=255, verbose_name='Description'), preserve_default=False, ), ]", "# Generated by Django 3.2.9 on 2021-12-06 10:02 from django.db", "10:02 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "Migration(migrations.Migration): dependencies = [ ('restaurants', '0001_initial'), ] operations = [", "on 2021-12-06 10:02 from django.db import migrations, models class Migration(migrations.Migration):", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('restaurants', '0001_initial'),", "] operations = [ migrations.AddField( model_name='restaurant', name='description', field=models.CharField(default='Description', max_length=255, verbose_name='Description'),", "migrations, models class Migration(migrations.Migration): dependencies = [ ('restaurants', '0001_initial'), ]", "Generated by Django 3.2.9 on 2021-12-06 10:02 from django.db import", "dependencies = [ ('restaurants', '0001_initial'), ] operations = [ migrations.AddField(", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "models class Migration(migrations.Migration): dependencies = [ ('restaurants', '0001_initial'), ] operations", "class Migration(migrations.Migration): dependencies = [ ('restaurants', '0001_initial'), ] operations =", "= [ ('restaurants', '0001_initial'), ] operations = [ migrations.AddField( 
model_name='restaurant',", "('restaurants', '0001_initial'), ] operations = [ migrations.AddField( model_name='restaurant', name='description', field=models.CharField(default='Description',", "'0001_initial'), ] operations = [ migrations.AddField( model_name='restaurant', name='description', field=models.CharField(default='Description', max_length=255,", "3.2.9 on 2021-12-06 10:02 from django.db import migrations, models class", "operations = [ migrations.AddField( model_name='restaurant', name='description', field=models.CharField(default='Description', max_length=255, verbose_name='Description'), preserve_default=False,", "<filename>buzzbox/restaurants/migrations/0002_restaurant_description.py # Generated by Django 3.2.9 on 2021-12-06 10:02 from", "[ ('restaurants', '0001_initial'), ] operations = [ migrations.AddField( model_name='restaurant', name='description',", "2021-12-06 10:02 from django.db import migrations, models class Migration(migrations.Migration): dependencies" ]
[ "python3 import os import contextlib from PyQt5 import QtCore, QtWidgets", "% domain if settings.contains(sname): path = settings.value(sname) while True: name,", "item in enum: self.addItem(enum.label(item), item) if value is not None:", "is not None: self.setValue(value) self.currentIndexChanged.connect(self._emit) def setValue(self, value): for index,", "resp == QtWidgets.QMessageBox.Yes: settings.setValue(sname, os.path.dirname(name)) return name if resp ==", "self.target = target self._stack = [] @contextlib.contextmanager def _layout(self, cls,", "extension): name = '%s.%s' % (name, extension) if os.path.exists(name): resp", "self.addItem(enum.label(item), item) if value is not None: self.setValue(value) self.currentIndexChanged.connect(self._emit) def", "1, 1) layout.setSpacing(1) yield layout @contextlib.contextmanager def vbox(self, *args, **kwargs):", "with Settings().grouped('Paths') as settings: path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'save_%s'", "cover return self._layout(QtWidgets.QStackedLayout, *args, **kwargs) def form(self, *args, **kwargs): class", "dummy = QtWidgets.QFileDialog.getSaveFileName(parent, _('Save'), path, '*.%s' % extension, options=QtWidgets.QFileDialog.DontConfirmOverwrite) if", "= enum for item in enum: self.addItem(enum.label(item), item) if value", "addRow(self, label, widget=None): # pylint: disable=C0111 if isinstance(label, str): label", "self._stack.append(layout) try: yield layout finally: self._pop(*args, **kwargs) def _pop(self, *args,", "layout): self.addRow(layout) def addRow(self, label, widget=None): # pylint: disable=C0111 if", "from PyQt5 import QtCore, QtWidgets from dsrlib.settings import Settings class", "1) layout.setSpacing(1) yield layout @contextlib.contextmanager def vbox(self, *args, **kwargs): #", "dummy = QtWidgets.QFileDialog.getOpenFileName(parent, _('Open file'), path, '*.%s' % extension if", "self._stack[-1] if isinstance(layout, 
QtWidgets.QSplitter): parent.addWidget(layout) else: if isinstance(parent, QtWidgets.QSplitter): container", "_('Save'), path, '*.%s' % extension, options=QtWidgets.QFileDialog.DontConfirmOverwrite) if not name: return", "self._pop(*args, **kwargs) def _pop(self, *args, **kwargs): layout = self._stack.pop() if", "cls() self._stack.append(layout) try: yield layout finally: self._pop(*args, **kwargs) def _pop(self,", "layout: layout.setContentsMargins(1, 1, 1, 1) layout.setSpacing(1) yield layout def stack(self,", "path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'save_%s' % domain if settings.contains(sname):", "self._stack = [] @contextlib.contextmanager def _layout(self, cls, *args, **kwargs): layout", "no cover return self._layout(QtWidgets.QSplitter, *args, **kwargs) def getSaveFilename(parent, domain, extension):", "as settings: path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'open_%s' % domain", "os.path.dirname(name)) return name return None class EnumComboBox(QtWidgets.QComboBox): valueChanged = QtCore.pyqtSignal(object)", "*args, **kwargs) def split(self, *args, **kwargs): # pragma: no cover", "= target self._stack = [] @contextlib.contextmanager def _layout(self, cls, *args,", "layout def stack(self, *args, **kwargs): # pragma: no cover return", "== QtWidgets.QMessageBox.No: continue return None settings.setValue(sname, os.path.dirname(name)) return name def", "name: return None if not name.endswith('.%s' % extension): name =", "name if resp == QtWidgets.QMessageBox.No: continue return None settings.setValue(sname, os.path.dirname(name))", "index, item in enumerate(self._enum): if value == item: self.setCurrentIndex(index) break", "= 'save_%s' % domain if settings.contains(sname): path = settings.value(sname) while", "extension, options=QtWidgets.QFileDialog.DontConfirmOverwrite) if not name: return None if not name.endswith('.%s'", "**kwargs) def _pop(self, *args, 
**kwargs): layout = self._stack.pop() if self._stack:", "_('This file already exists. Overwrite?'), QtWidgets.QMessageBox.Yes|QtWidgets.QMessageBox.No|QtWidgets.QMessageBox.Cancel) if resp == QtWidgets.QMessageBox.Yes:", "self._layout(QtWidgets.QStackedLayout, *args, **kwargs) def form(self, *args, **kwargs): class _FormLayout(QtWidgets.QFormLayout): def", "None if not name.endswith('.%s' % extension): name = '%s.%s' %", "QtWidgets.QMainWindow): if isinstance(layout, QtWidgets.QSplitter): self.target.setCentralWidget(layout) else: container = QtWidgets.QWidget(self.target) container.setLayout(layout)", "= '%s.%s' % (name, extension) if os.path.exists(name): resp = QtWidgets.QMessageBox.question(parent,", "if value == item: self.setCurrentIndex(index) break else: raise ValueError('Value \"%s\"", "QtWidgets.QSplitter): self.target.setCentralWidget(layout) else: container = QtWidgets.QWidget(self.target) container.setLayout(layout) self.target.setCentralWidget(container) else: if", "isinstance(layout, QtWidgets.QSplitter): layout2 = QtWidgets.QHBoxLayout() layout2.setContentsMargins(0, 0, 0, 0) layout2.addWidget(layout)", "layout2 = QtWidgets.QHBoxLayout() layout2.setContentsMargins(0, 0, 0, 0) layout2.addWidget(layout) self.target.setLayout(layout2) else:", "if not name.endswith('.%s' % extension): name = '%s.%s' % (name,", "**kwargs): # pragma: no cover return self._layout(QtWidgets.QSplitter, *args, **kwargs) def", "*args, enum, value=None, **kwargs): super().__init__(*args, **kwargs) self._enum = enum for", "if resp == QtWidgets.QMessageBox.Yes: settings.setValue(sname, os.path.dirname(name)) return name if resp", "as layout: layout.setContentsMargins(1, 1, 1, 1) layout.setSpacing(1) yield layout @contextlib.contextmanager", "yield layout @contextlib.contextmanager def vbox(self, *args, **kwargs): # pragma: no", "else '') if name: settings.setValue(sname, os.path.dirname(name)) return name return None", "enum for item in enum: self.addItem(enum.label(item), item) 
if value is", "getSaveFilename(parent, domain, extension): with Settings().grouped('Paths') as settings: path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation)", "if value is not None: self.setValue(value) self.currentIndexChanged.connect(self._emit) def setValue(self, value):", "break else: raise ValueError('Value \"%s\" not found in enum' %", "= 'open_%s' % domain if settings.contains(sname): path = settings.value(sname) name,", "if isinstance(parent, QtWidgets.QSplitter): container = QtWidgets.QWidget(parent) container.setLayout(layout) parent.addWidget(container) else: parent.addLayout(layout,", "__init__(self, *args, enum, value=None, **kwargs): super().__init__(*args, **kwargs) self._enum = enum", "self.setCurrentIndex(index) break else: raise ValueError('Value \"%s\" not found in enum'", "else: parent.addLayout(layout, *args, **kwargs) elif isinstance(self.target, QtWidgets.QMainWindow): if isinstance(layout, QtWidgets.QSplitter):", "layout2.setContentsMargins(0, 0, 0, 0) layout2.addWidget(layout) self.target.setLayout(layout2) else: self.target.setLayout(layout) @contextlib.contextmanager def", "in enum: self.addItem(enum.label(item), item) if value is not None: self.setValue(value)", "= [] @contextlib.contextmanager def _layout(self, cls, *args, **kwargs): layout =", "not name: return None if not name.endswith('.%s' % extension): name", "QtWidgets.QMessageBox.No: continue return None settings.setValue(sname, os.path.dirname(name)) return name def getOpenFilename(parent,", "def getOpenFilename(parent, domain, extension): with Settings().grouped('Paths') as settings: path =", "def _pop(self, *args, **kwargs): layout = self._stack.pop() if self._stack: parent", "= QtWidgets.QHBoxLayout() layout2.setContentsMargins(0, 0, 0, 0) layout2.addWidget(layout) self.target.setLayout(layout2) else: self.target.setLayout(layout)", "ValueError('Value \"%s\" not found in enum' % str(value)) def _emit(self,", "layout finally: self._pop(*args, 
**kwargs) def _pop(self, *args, **kwargs): layout =", "*args, **kwargs) def form(self, *args, **kwargs): class _FormLayout(QtWidgets.QFormLayout): def addLayout(self,", "not name.endswith('.%s' % extension): name = '%s.%s' % (name, extension)", "'') if name: settings.setValue(sname, os.path.dirname(name)) return name return None class", "hbox(self, *args, **kwargs): # pragma: no cover with self._layout(QtWidgets.QHBoxLayout, *args,", "= QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'save_%s' % domain if settings.contains(sname): path", "def stack(self, *args, **kwargs): # pragma: no cover return self._layout(QtWidgets.QStackedLayout,", "widget=None): # pylint: disable=C0111 if isinstance(label, str): label = QtWidgets.QLabel(label)", "def __init__(self, *args, enum, value=None, **kwargs): super().__init__(*args, **kwargs) self._enum =", "name, dummy = QtWidgets.QFileDialog.getOpenFileName(parent, _('Open file'), path, '*.%s' % extension", "is None: super().addRow(label) else: super().addRow(label, widget) return self._layout(_FormLayout, *args, **kwargs)", "% (name, extension) if os.path.exists(name): resp = QtWidgets.QMessageBox.question(parent, _('Overwrite file?'),", "**kwargs): # pragma: no cover with self._layout(QtWidgets.QHBoxLayout, *args, **kwargs) as", "# pragma: no cover with self._layout(QtWidgets.QVBoxLayout, *args, **kwargs) as layout:", "already exists. 
Overwrite?'), QtWidgets.QMessageBox.Yes|QtWidgets.QMessageBox.No|QtWidgets.QMessageBox.Cancel) if resp == QtWidgets.QMessageBox.Yes: settings.setValue(sname, os.path.dirname(name))", "value == item: self.setCurrentIndex(index) break else: raise ValueError('Value \"%s\" not", "if resp == QtWidgets.QMessageBox.No: continue return None settings.setValue(sname, os.path.dirname(name)) return", "*args, **kwargs): # pragma: no cover with self._layout(QtWidgets.QHBoxLayout, *args, **kwargs)", "self._layout(_FormLayout, *args, **kwargs) def split(self, *args, **kwargs): # pragma: no", "= cls() self._stack.append(layout) try: yield layout finally: self._pop(*args, **kwargs) def", "if settings.contains(sname): path = settings.value(sname) name, dummy = QtWidgets.QFileDialog.getOpenFileName(parent, _('Open", "if os.path.exists(name): resp = QtWidgets.QMessageBox.question(parent, _('Overwrite file?'), _('This file already", "None: self.setValue(value) self.currentIndexChanged.connect(self._emit) def setValue(self, value): for index, item in", "self._stack.pop() if self._stack: parent = self._stack[-1] if isinstance(layout, QtWidgets.QSplitter): parent.addWidget(layout)", "path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'open_%s' % domain if settings.contains(sname):", "# pragma: no cover return self._layout(QtWidgets.QSplitter, *args, **kwargs) def getSaveFilename(parent,", "self.target.setCentralWidget(layout) else: container = QtWidgets.QWidget(self.target) container.setLayout(layout) self.target.setCentralWidget(container) else: if isinstance(layout,", "**kwargs): layout = self._stack.pop() if self._stack: parent = self._stack[-1] if", "disable=C0111 if isinstance(label, str): label = QtWidgets.QLabel(label) label.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding) label.setAlignment(QtCore.Qt.AlignVCenter)", "return None if not name.endswith('.%s' % extension): name = '%s.%s'", "enum, value=None, 
**kwargs): super().__init__(*args, **kwargs) self._enum = enum for item", "import QtCore, QtWidgets from dsrlib.settings import Settings class LayoutBuilder: def", "options=QtWidgets.QFileDialog.DontConfirmOverwrite) if not name: return None if not name.endswith('.%s' %", "layout @contextlib.contextmanager def vbox(self, *args, **kwargs): # pragma: no cover", "parent.addWidget(container) else: parent.addLayout(layout, *args, **kwargs) elif isinstance(self.target, QtWidgets.QMainWindow): if isinstance(layout,", "def setValue(self, value): for index, item in enumerate(self._enum): if value", "if name: settings.setValue(sname, os.path.dirname(name)) return name return None class EnumComboBox(QtWidgets.QComboBox):", "value is not None: self.setValue(value) self.currentIndexChanged.connect(self._emit) def setValue(self, value): for", "self._layout(QtWidgets.QSplitter, *args, **kwargs) def getSaveFilename(parent, domain, extension): with Settings().grouped('Paths') as", "file?'), _('This file already exists. 
Overwrite?'), QtWidgets.QMessageBox.Yes|QtWidgets.QMessageBox.No|QtWidgets.QMessageBox.Cancel) if resp ==", "layout = self._stack.pop() if self._stack: parent = self._stack[-1] if isinstance(layout,", "self.addRow(layout) def addRow(self, label, widget=None): # pylint: disable=C0111 if isinstance(label,", "raise ValueError('Value \"%s\" not found in enum' % str(value)) def", "**kwargs) as layout: layout.setContentsMargins(1, 1, 1, 1) layout.setSpacing(1) yield layout", "name = '%s.%s' % (name, extension) if os.path.exists(name): resp =", "QtWidgets.QSizePolicy.Expanding) label.setAlignment(QtCore.Qt.AlignVCenter) if widget is None: super().addRow(label) else: super().addRow(label, widget)", "= settings.value(sname) while True: name, dummy = QtWidgets.QFileDialog.getSaveFileName(parent, _('Save'), path,", "label = QtWidgets.QLabel(label) label.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding) label.setAlignment(QtCore.Qt.AlignVCenter) if widget is None:", "with Settings().grouped('Paths') as settings: path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'open_%s'", "isinstance(layout, QtWidgets.QSplitter): parent.addWidget(layout) else: if isinstance(parent, QtWidgets.QSplitter): container = QtWidgets.QWidget(parent)", "@contextlib.contextmanager def vbox(self, *args, **kwargs): # pragma: no cover with", "QtWidgets.QWidget(self.target) container.setLayout(layout) self.target.setCentralWidget(container) else: if isinstance(layout, QtWidgets.QSplitter): layout2 = QtWidgets.QHBoxLayout()", "# pragma: no cover return self._layout(QtWidgets.QStackedLayout, *args, **kwargs) def form(self,", "_pop(self, *args, **kwargs): layout = self._stack.pop() if self._stack: parent =", "parent.addWidget(layout) else: if isinstance(parent, QtWidgets.QSplitter): container = QtWidgets.QWidget(parent) container.setLayout(layout) parent.addWidget(container)", "cover with self._layout(QtWidgets.QHBoxLayout, *args, 
**kwargs) as layout: layout.setContentsMargins(1, 1, 1,", "= self._stack.pop() if self._stack: parent = self._stack[-1] if isinstance(layout, QtWidgets.QSplitter):", "self._layout(QtWidgets.QHBoxLayout, *args, **kwargs) as layout: layout.setContentsMargins(1, 1, 1, 1) layout.setSpacing(1)", "(name, extension) if os.path.exists(name): resp = QtWidgets.QMessageBox.question(parent, _('Overwrite file?'), _('This", "= QtWidgets.QLabel(label) label.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding) label.setAlignment(QtCore.Qt.AlignVCenter) if widget is None: super().addRow(label)", "@contextlib.contextmanager def _layout(self, cls, *args, **kwargs): layout = cls() self._stack.append(layout)", "settings.setValue(sname, os.path.dirname(name)) return name return None class EnumComboBox(QtWidgets.QComboBox): valueChanged =", "*args, **kwargs): # pragma: no cover return self._layout(QtWidgets.QSplitter, *args, **kwargs)", "*args, **kwargs) def getSaveFilename(parent, domain, extension): with Settings().grouped('Paths') as settings:", "domain, extension): with Settings().grouped('Paths') as settings: path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname", "QtCore, QtWidgets from dsrlib.settings import Settings class LayoutBuilder: def __init__(self,", "container = QtWidgets.QWidget(self.target) container.setLayout(layout) self.target.setCentralWidget(container) else: if isinstance(layout, QtWidgets.QSplitter): layout2", "if isinstance(label, str): label = QtWidgets.QLabel(label) label.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding) label.setAlignment(QtCore.Qt.AlignVCenter) if", "sname = 'save_%s' % domain if settings.contains(sname): path = settings.value(sname)", "def __init__(self, target): self.target = target self._stack = [] @contextlib.contextmanager", "_('Open file'), path, '*.%s' % extension if extension else '')", "[] @contextlib.contextmanager def _layout(self, cls, 
*args, **kwargs): layout = cls()", "if self._stack: parent = self._stack[-1] if isinstance(layout, QtWidgets.QSplitter): parent.addWidget(layout) else:", "% extension): name = '%s.%s' % (name, extension) if os.path.exists(name):", "super().addRow(label) else: super().addRow(label, widget) return self._layout(_FormLayout, *args, **kwargs) def split(self,", "continue return None settings.setValue(sname, os.path.dirname(name)) return name def getOpenFilename(parent, domain,", "__init__(self, target): self.target = target self._stack = [] @contextlib.contextmanager def", "vbox(self, *args, **kwargs): # pragma: no cover with self._layout(QtWidgets.QVBoxLayout, *args,", "value): for index, item in enumerate(self._enum): if value == item:", "item: self.setCurrentIndex(index) break else: raise ValueError('Value \"%s\" not found in", "return self._layout(QtWidgets.QStackedLayout, *args, **kwargs) def form(self, *args, **kwargs): class _FormLayout(QtWidgets.QFormLayout):", "QtWidgets from dsrlib.settings import Settings class LayoutBuilder: def __init__(self, target):", "None: super().addRow(label) else: super().addRow(label, widget) return self._layout(_FormLayout, *args, **kwargs) def", "stack(self, *args, **kwargs): # pragma: no cover return self._layout(QtWidgets.QStackedLayout, *args,", "= QtWidgets.QWidget(self.target) container.setLayout(layout) self.target.setCentralWidget(container) else: if isinstance(layout, QtWidgets.QSplitter): layout2 =", "_FormLayout(QtWidgets.QFormLayout): def addLayout(self, layout): self.addRow(layout) def addRow(self, label, widget=None): #", "file already exists. 
Overwrite?'), QtWidgets.QMessageBox.Yes|QtWidgets.QMessageBox.No|QtWidgets.QMessageBox.Cancel) if resp == QtWidgets.QMessageBox.Yes: settings.setValue(sname,", "**kwargs) def form(self, *args, **kwargs): class _FormLayout(QtWidgets.QFormLayout): def addLayout(self, layout):", "valueChanged = QtCore.pyqtSignal(object) def __init__(self, *args, enum, value=None, **kwargs): super().__init__(*args,", "= QtCore.pyqtSignal(object) def __init__(self, *args, enum, value=None, **kwargs): super().__init__(*args, **kwargs)", "parent.addLayout(layout, *args, **kwargs) elif isinstance(self.target, QtWidgets.QMainWindow): if isinstance(layout, QtWidgets.QSplitter): self.target.setCentralWidget(layout)", "self._enum = enum for item in enum: self.addItem(enum.label(item), item) if", "QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'save_%s' % domain if settings.contains(sname): path =", "try: yield layout finally: self._pop(*args, **kwargs) def _pop(self, *args, **kwargs):", "elif isinstance(self.target, QtWidgets.QMainWindow): if isinstance(layout, QtWidgets.QSplitter): self.target.setCentralWidget(layout) else: container =", "# pylint: disable=C0111 if isinstance(label, str): label = QtWidgets.QLabel(label) label.setSizePolicy(QtWidgets.QSizePolicy.Preferred,", "settings.value(sname) while True: name, dummy = QtWidgets.QFileDialog.getSaveFileName(parent, _('Save'), path, '*.%s'", "_('Overwrite file?'), _('This file already exists. 
Overwrite?'), QtWidgets.QMessageBox.Yes|QtWidgets.QMessageBox.No|QtWidgets.QMessageBox.Cancel) if resp", "QtWidgets.QLabel(label) label.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding) label.setAlignment(QtCore.Qt.AlignVCenter) if widget is None: super().addRow(label) else:", "extension) if os.path.exists(name): resp = QtWidgets.QMessageBox.question(parent, _('Overwrite file?'), _('This file", "QtCore.pyqtSignal(object) def __init__(self, *args, enum, value=None, **kwargs): super().__init__(*args, **kwargs) self._enum", "self.currentIndexChanged.connect(self._emit) def setValue(self, value): for index, item in enumerate(self._enum): if", "def _layout(self, cls, *args, **kwargs): layout = cls() self._stack.append(layout) try:", "self.target.setLayout(layout) @contextlib.contextmanager def hbox(self, *args, **kwargs): # pragma: no cover", "else: super().addRow(label, widget) return self._layout(_FormLayout, *args, **kwargs) def split(self, *args,", "enum: self.addItem(enum.label(item), item) if value is not None: self.setValue(value) self.currentIndexChanged.connect(self._emit)", "target): self.target = target self._stack = [] @contextlib.contextmanager def _layout(self,", "name return None class EnumComboBox(QtWidgets.QComboBox): valueChanged = QtCore.pyqtSignal(object) def __init__(self,", "split(self, *args, **kwargs): # pragma: no cover return self._layout(QtWidgets.QSplitter, *args,", "dsrlib.settings import Settings class LayoutBuilder: def __init__(self, target): self.target =", "name: settings.setValue(sname, os.path.dirname(name)) return name return None class EnumComboBox(QtWidgets.QComboBox): valueChanged", "else: raise ValueError('Value \"%s\" not found in enum' % str(value))", "0, 0) layout2.addWidget(layout) self.target.setLayout(layout2) else: self.target.setLayout(layout) @contextlib.contextmanager def hbox(self, *args,", "if settings.contains(sname): path = settings.value(sname) while True: name, dummy =", 
"layout.setSpacing(1) yield layout @contextlib.contextmanager def vbox(self, *args, **kwargs): # pragma:", "extension): with Settings().grouped('Paths') as settings: path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname =", "def addRow(self, label, widget=None): # pylint: disable=C0111 if isinstance(label, str):", "def split(self, *args, **kwargs): # pragma: no cover return self._layout(QtWidgets.QSplitter,", "= QtWidgets.QFileDialog.getSaveFileName(parent, _('Save'), path, '*.%s' % extension, options=QtWidgets.QFileDialog.DontConfirmOverwrite) if not", "no cover return self._layout(QtWidgets.QStackedLayout, *args, **kwargs) def form(self, *args, **kwargs):", "**kwargs) def getSaveFilename(parent, domain, extension): with Settings().grouped('Paths') as settings: path", "= QtWidgets.QMessageBox.question(parent, _('Overwrite file?'), _('This file already exists. Overwrite?'), QtWidgets.QMessageBox.Yes|QtWidgets.QMessageBox.No|QtWidgets.QMessageBox.Cancel)", "return name if resp == QtWidgets.QMessageBox.No: continue return None settings.setValue(sname,", "in enumerate(self._enum): if value == item: self.setCurrentIndex(index) break else: raise", "self.setValue(value) self.currentIndexChanged.connect(self._emit) def setValue(self, value): for index, item in enumerate(self._enum):", "#!/usr/bin/env python3 import os import contextlib from PyQt5 import QtCore,", "else: if isinstance(parent, QtWidgets.QSplitter): container = QtWidgets.QWidget(parent) container.setLayout(layout) parent.addWidget(container) else:", "class _FormLayout(QtWidgets.QFormLayout): def addLayout(self, layout): self.addRow(layout) def addRow(self, label, widget=None):", "QtWidgets.QFileDialog.getSaveFileName(parent, _('Save'), path, '*.%s' % extension, options=QtWidgets.QFileDialog.DontConfirmOverwrite) if not name:", "domain if settings.contains(sname): path = settings.value(sname) name, dummy = QtWidgets.QFileDialog.getOpenFileName(parent,", "None class 
EnumComboBox(QtWidgets.QComboBox): valueChanged = QtCore.pyqtSignal(object) def __init__(self, *args, enum,", "**kwargs): super().__init__(*args, **kwargs) self._enum = enum for item in enum:", "1, 1, 1) layout.setSpacing(1) yield layout def stack(self, *args, **kwargs):", "settings.setValue(sname, os.path.dirname(name)) return name def getOpenFilename(parent, domain, extension): with Settings().grouped('Paths')", "pragma: no cover with self._layout(QtWidgets.QHBoxLayout, *args, **kwargs) as layout: layout.setContentsMargins(1,", "if isinstance(layout, QtWidgets.QSplitter): parent.addWidget(layout) else: if isinstance(parent, QtWidgets.QSplitter): container =", "Settings().grouped('Paths') as settings: path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'save_%s' %", "if isinstance(layout, QtWidgets.QSplitter): layout2 = QtWidgets.QHBoxLayout() layout2.setContentsMargins(0, 0, 0, 0)", "self.target.setCentralWidget(container) else: if isinstance(layout, QtWidgets.QSplitter): layout2 = QtWidgets.QHBoxLayout() layout2.setContentsMargins(0, 0,", "QtWidgets.QMessageBox.Yes|QtWidgets.QMessageBox.No|QtWidgets.QMessageBox.Cancel) if resp == QtWidgets.QMessageBox.Yes: settings.setValue(sname, os.path.dirname(name)) return name if", "'open_%s' % domain if settings.contains(sname): path = settings.value(sname) name, dummy", "value=None, **kwargs): super().__init__(*args, **kwargs) self._enum = enum for item in", "settings: path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'save_%s' % domain if", "1) layout.setSpacing(1) yield layout def stack(self, *args, **kwargs): # pragma:", "setValue(self, value): for index, item in enumerate(self._enum): if value ==", "QtWidgets.QFileDialog.getOpenFileName(parent, _('Open file'), path, '*.%s' % extension if extension else", "0) layout2.addWidget(layout) self.target.setLayout(layout2) else: self.target.setLayout(layout) @contextlib.contextmanager def 
hbox(self, *args, **kwargs):", "class LayoutBuilder: def __init__(self, target): self.target = target self._stack =", "*args, **kwargs): class _FormLayout(QtWidgets.QFormLayout): def addLayout(self, layout): self.addRow(layout) def addRow(self,", "else: if isinstance(layout, QtWidgets.QSplitter): layout2 = QtWidgets.QHBoxLayout() layout2.setContentsMargins(0, 0, 0,", "from dsrlib.settings import Settings class LayoutBuilder: def __init__(self, target): self.target", "Settings class LayoutBuilder: def __init__(self, target): self.target = target self._stack", "*args, **kwargs): # pragma: no cover return self._layout(QtWidgets.QStackedLayout, *args, **kwargs)", "PyQt5 import QtCore, QtWidgets from dsrlib.settings import Settings class LayoutBuilder:", "widget is None: super().addRow(label) else: super().addRow(label, widget) return self._layout(_FormLayout, *args,", "super().addRow(label, widget) return self._layout(_FormLayout, *args, **kwargs) def split(self, *args, **kwargs):", "not found in enum' % str(value)) def _emit(self, _): self.valueChanged.emit(self.currentData())", "return self._layout(_FormLayout, *args, **kwargs) def split(self, *args, **kwargs): # pragma:", "as layout: layout.setContentsMargins(1, 1, 1, 1) layout.setSpacing(1) yield layout def", "container.setLayout(layout) self.target.setCentralWidget(container) else: if isinstance(layout, QtWidgets.QSplitter): layout2 = QtWidgets.QHBoxLayout() layout2.setContentsMargins(0,", "name.endswith('.%s' % extension): name = '%s.%s' % (name, extension) if", "True: name, dummy = QtWidgets.QFileDialog.getSaveFileName(parent, _('Save'), path, '*.%s' % extension,", "item) if value is not None: self.setValue(value) self.currentIndexChanged.connect(self._emit) def setValue(self,", "pylint: disable=C0111 if isinstance(label, str): label = QtWidgets.QLabel(label) label.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding)", "layout.setContentsMargins(1, 1, 1, 1) layout.setSpacing(1) yield 
layout def stack(self, *args,", "getOpenFilename(parent, domain, extension): with Settings().grouped('Paths') as settings: path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation)", "**kwargs): class _FormLayout(QtWidgets.QFormLayout): def addLayout(self, layout): self.addRow(layout) def addRow(self, label,", "self._stack: parent = self._stack[-1] if isinstance(layout, QtWidgets.QSplitter): parent.addWidget(layout) else: if", "**kwargs): # pragma: no cover with self._layout(QtWidgets.QVBoxLayout, *args, **kwargs) as", "'save_%s' % domain if settings.contains(sname): path = settings.value(sname) while True:", "for index, item in enumerate(self._enum): if value == item: self.setCurrentIndex(index)", "QtWidgets.QMessageBox.Yes: settings.setValue(sname, os.path.dirname(name)) return name if resp == QtWidgets.QMessageBox.No: continue", "'*.%s' % extension if extension else '') if name: settings.setValue(sname,", "contextlib from PyQt5 import QtCore, QtWidgets from dsrlib.settings import Settings", "layout = cls() self._stack.append(layout) try: yield layout finally: self._pop(*args, **kwargs)", "extension if extension else '') if name: settings.setValue(sname, os.path.dirname(name)) return", "Settings().grouped('Paths') as settings: path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'open_%s' %", "= settings.value(sname) name, dummy = QtWidgets.QFileDialog.getOpenFileName(parent, _('Open file'), path, '*.%s'", "'%s.%s' % (name, extension) if os.path.exists(name): resp = QtWidgets.QMessageBox.question(parent, _('Overwrite", "with self._layout(QtWidgets.QHBoxLayout, *args, **kwargs) as layout: layout.setContentsMargins(1, 1, 1, 1)", "path = settings.value(sname) while True: name, dummy = QtWidgets.QFileDialog.getSaveFileName(parent, _('Save'),", "QtWidgets.QSplitter): layout2 = QtWidgets.QHBoxLayout() layout2.setContentsMargins(0, 0, 0, 0) layout2.addWidget(layout) self.target.setLayout(layout2)", 
"== item: self.setCurrentIndex(index) break else: raise ValueError('Value \"%s\" not found", "import Settings class LayoutBuilder: def __init__(self, target): self.target = target", "QtWidgets.QSplitter): container = QtWidgets.QWidget(parent) container.setLayout(layout) parent.addWidget(container) else: parent.addLayout(layout, *args, **kwargs)", "*args, **kwargs) elif isinstance(self.target, QtWidgets.QMainWindow): if isinstance(layout, QtWidgets.QSplitter): self.target.setCentralWidget(layout) else:", "os.path.exists(name): resp = QtWidgets.QMessageBox.question(parent, _('Overwrite file?'), _('This file already exists.", "# pragma: no cover with self._layout(QtWidgets.QHBoxLayout, *args, **kwargs) as layout:", "'*.%s' % extension, options=QtWidgets.QFileDialog.DontConfirmOverwrite) if not name: return None if", "pragma: no cover with self._layout(QtWidgets.QVBoxLayout, *args, **kwargs) as layout: layout.setContentsMargins(1,", "**kwargs): layout = cls() self._stack.append(layout) try: yield layout finally: self._pop(*args,", "settings: path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'open_%s' % domain if", "label, widget=None): # pylint: disable=C0111 if isinstance(label, str): label =", "self.target.setLayout(layout2) else: self.target.setLayout(layout) @contextlib.contextmanager def hbox(self, *args, **kwargs): # pragma:", "cls, *args, **kwargs): layout = cls() self._stack.append(layout) try: yield layout", "\"%s\" not found in enum' % str(value)) def _emit(self, _):", "import contextlib from PyQt5 import QtCore, QtWidgets from dsrlib.settings import", "*args, **kwargs): # pragma: no cover with self._layout(QtWidgets.QVBoxLayout, *args, **kwargs)", "form(self, *args, **kwargs): class _FormLayout(QtWidgets.QFormLayout): def addLayout(self, layout): self.addRow(layout) def", "return name return None class EnumComboBox(QtWidgets.QComboBox): valueChanged = QtCore.pyqtSignal(object) def", "isinstance(layout, 
QtWidgets.QSplitter): self.target.setCentralWidget(layout) else: container = QtWidgets.QWidget(self.target) container.setLayout(layout) self.target.setCentralWidget(container) else:", "yield layout finally: self._pop(*args, **kwargs) def _pop(self, *args, **kwargs): layout", "EnumComboBox(QtWidgets.QComboBox): valueChanged = QtCore.pyqtSignal(object) def __init__(self, *args, enum, value=None, **kwargs):", "_layout(self, cls, *args, **kwargs): layout = cls() self._stack.append(layout) try: yield", "**kwargs): # pragma: no cover return self._layout(QtWidgets.QStackedLayout, *args, **kwargs) def", "with self._layout(QtWidgets.QVBoxLayout, *args, **kwargs) as layout: layout.setContentsMargins(1, 1, 1, 1)", "return name def getOpenFilename(parent, domain, extension): with Settings().grouped('Paths') as settings:", "else: self.target.setLayout(layout) @contextlib.contextmanager def hbox(self, *args, **kwargs): # pragma: no", "isinstance(label, str): label = QtWidgets.QLabel(label) label.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding) label.setAlignment(QtCore.Qt.AlignVCenter) if widget", "addLayout(self, layout): self.addRow(layout) def addRow(self, label, widget=None): # pylint: disable=C0111", "name, dummy = QtWidgets.QFileDialog.getSaveFileName(parent, _('Save'), path, '*.%s' % extension, options=QtWidgets.QFileDialog.DontConfirmOverwrite)", "**kwargs) self._enum = enum for item in enum: self.addItem(enum.label(item), item)", "None settings.setValue(sname, os.path.dirname(name)) return name def getOpenFilename(parent, domain, extension): with", "exists. 
Overwrite?'), QtWidgets.QMessageBox.Yes|QtWidgets.QMessageBox.No|QtWidgets.QMessageBox.Cancel) if resp == QtWidgets.QMessageBox.Yes: settings.setValue(sname, os.path.dirname(name)) return", "% extension if extension else '') if name: settings.setValue(sname, os.path.dirname(name))", "isinstance(parent, QtWidgets.QSplitter): container = QtWidgets.QWidget(parent) container.setLayout(layout) parent.addWidget(container) else: parent.addLayout(layout, *args,", "cover with self._layout(QtWidgets.QVBoxLayout, *args, **kwargs) as layout: layout.setContentsMargins(1, 1, 1,", "os import contextlib from PyQt5 import QtCore, QtWidgets from dsrlib.settings", "*args, **kwargs): layout = cls() self._stack.append(layout) try: yield layout finally:", "QtWidgets.QHBoxLayout() layout2.setContentsMargins(0, 0, 0, 0) layout2.addWidget(layout) self.target.setLayout(layout2) else: self.target.setLayout(layout) @contextlib.contextmanager", "settings.contains(sname): path = settings.value(sname) while True: name, dummy = QtWidgets.QFileDialog.getSaveFileName(parent,", "path = settings.value(sname) name, dummy = QtWidgets.QFileDialog.getOpenFileName(parent, _('Open file'), path,", "0, 0, 0) layout2.addWidget(layout) self.target.setLayout(layout2) else: self.target.setLayout(layout) @contextlib.contextmanager def hbox(self,", "if widget is None: super().addRow(label) else: super().addRow(label, widget) return self._layout(_FormLayout,", "settings.value(sname) name, dummy = QtWidgets.QFileDialog.getOpenFileName(parent, _('Open file'), path, '*.%s' %", "*args, **kwargs): layout = self._stack.pop() if self._stack: parent = self._stack[-1]", "Overwrite?'), QtWidgets.QMessageBox.Yes|QtWidgets.QMessageBox.No|QtWidgets.QMessageBox.Cancel) if resp == QtWidgets.QMessageBox.Yes: settings.setValue(sname, os.path.dirname(name)) return name", "1, 1, 1) layout.setSpacing(1) yield layout @contextlib.contextmanager def vbox(self, *args,", "pragma: no cover return self._layout(QtWidgets.QStackedLayout, 
*args, **kwargs) def form(self, *args,", "isinstance(self.target, QtWidgets.QMainWindow): if isinstance(layout, QtWidgets.QSplitter): self.target.setCentralWidget(layout) else: container = QtWidgets.QWidget(self.target)", "no cover with self._layout(QtWidgets.QVBoxLayout, *args, **kwargs) as layout: layout.setContentsMargins(1, 1,", "1, 1) layout.setSpacing(1) yield layout def stack(self, *args, **kwargs): #", "self._layout(QtWidgets.QVBoxLayout, *args, **kwargs) as layout: layout.setContentsMargins(1, 1, 1, 1) layout.setSpacing(1)", "else: container = QtWidgets.QWidget(self.target) container.setLayout(layout) self.target.setCentralWidget(container) else: if isinstance(layout, QtWidgets.QSplitter):", "enumerate(self._enum): if value == item: self.setCurrentIndex(index) break else: raise ValueError('Value", "resp = QtWidgets.QMessageBox.question(parent, _('Overwrite file?'), _('This file already exists. Overwrite?'),", "= QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'open_%s' % domain if settings.contains(sname): path", "for item in enum: self.addItem(enum.label(item), item) if value is not", "% domain if settings.contains(sname): path = settings.value(sname) name, dummy =", "path, '*.%s' % extension, options=QtWidgets.QFileDialog.DontConfirmOverwrite) if not name: return None", "os.path.dirname(name)) return name def getOpenFilename(parent, domain, extension): with Settings().grouped('Paths') as", "name def getOpenFilename(parent, domain, extension): with Settings().grouped('Paths') as settings: path", "QtWidgets.QWidget(parent) container.setLayout(layout) parent.addWidget(container) else: parent.addLayout(layout, *args, **kwargs) elif isinstance(self.target, QtWidgets.QMainWindow):", "class EnumComboBox(QtWidgets.QComboBox): valueChanged = QtCore.pyqtSignal(object) def __init__(self, *args, enum, value=None,", "**kwargs) elif isinstance(self.target, QtWidgets.QMainWindow): if isinstance(layout, QtWidgets.QSplitter): 
self.target.setCentralWidget(layout) else: container", "def form(self, *args, **kwargs): class _FormLayout(QtWidgets.QFormLayout): def addLayout(self, layout): self.addRow(layout)", "container = QtWidgets.QWidget(parent) container.setLayout(layout) parent.addWidget(container) else: parent.addLayout(layout, *args, **kwargs) elif", "file'), path, '*.%s' % extension if extension else '') if", "sname = 'open_%s' % domain if settings.contains(sname): path = settings.value(sname)", "**kwargs) def split(self, *args, **kwargs): # pragma: no cover return", "not None: self.setValue(value) self.currentIndexChanged.connect(self._emit) def setValue(self, value): for index, item", "return self._layout(QtWidgets.QSplitter, *args, **kwargs) def getSaveFilename(parent, domain, extension): with Settings().grouped('Paths')", "def hbox(self, *args, **kwargs): # pragma: no cover with self._layout(QtWidgets.QHBoxLayout,", "label.setAlignment(QtCore.Qt.AlignVCenter) if widget is None: super().addRow(label) else: super().addRow(label, widget) return", "def addLayout(self, layout): self.addRow(layout) def addRow(self, label, widget=None): # pylint:", "os.path.dirname(name)) return name if resp == QtWidgets.QMessageBox.No: continue return None", "= QtWidgets.QWidget(parent) container.setLayout(layout) parent.addWidget(container) else: parent.addLayout(layout, *args, **kwargs) elif isinstance(self.target,", "*args, **kwargs) as layout: layout.setContentsMargins(1, 1, 1, 1) layout.setSpacing(1) yield", "QtWidgets.QMessageBox.question(parent, _('Overwrite file?'), _('This file already exists. 
Overwrite?'), QtWidgets.QMessageBox.Yes|QtWidgets.QMessageBox.No|QtWidgets.QMessageBox.Cancel) if", "no cover with self._layout(QtWidgets.QHBoxLayout, *args, **kwargs) as layout: layout.setContentsMargins(1, 1,", "def vbox(self, *args, **kwargs): # pragma: no cover with self._layout(QtWidgets.QVBoxLayout,", "while True: name, dummy = QtWidgets.QFileDialog.getSaveFileName(parent, _('Save'), path, '*.%s' %", "= self._stack[-1] if isinstance(layout, QtWidgets.QSplitter): parent.addWidget(layout) else: if isinstance(parent, QtWidgets.QSplitter):", "settings.contains(sname): path = settings.value(sname) name, dummy = QtWidgets.QFileDialog.getOpenFileName(parent, _('Open file'),", "QtWidgets.QSplitter): parent.addWidget(layout) else: if isinstance(parent, QtWidgets.QSplitter): container = QtWidgets.QWidget(parent) container.setLayout(layout)", "item in enumerate(self._enum): if value == item: self.setCurrentIndex(index) break else:", "container.setLayout(layout) parent.addWidget(container) else: parent.addLayout(layout, *args, **kwargs) elif isinstance(self.target, QtWidgets.QMainWindow): if", "pragma: no cover return self._layout(QtWidgets.QSplitter, *args, **kwargs) def getSaveFilename(parent, domain,", "yield layout def stack(self, *args, **kwargs): # pragma: no cover", "layout2.addWidget(layout) self.target.setLayout(layout2) else: self.target.setLayout(layout) @contextlib.contextmanager def hbox(self, *args, **kwargs): #", "target self._stack = [] @contextlib.contextmanager def _layout(self, cls, *args, **kwargs):", "== QtWidgets.QMessageBox.Yes: settings.setValue(sname, os.path.dirname(name)) return name if resp == QtWidgets.QMessageBox.No:", "<filename>src/dsrlib/ui/utils.py<gh_stars>1-10 #!/usr/bin/env python3 import os import contextlib from PyQt5 import", "@contextlib.contextmanager def hbox(self, *args, **kwargs): # pragma: no cover with", "resp == QtWidgets.QMessageBox.No: continue return None settings.setValue(sname, os.path.dirname(name)) return name", "if 
isinstance(layout, QtWidgets.QSplitter): self.target.setCentralWidget(layout) else: container = QtWidgets.QWidget(self.target) container.setLayout(layout) self.target.setCentralWidget(container)", "parent = self._stack[-1] if isinstance(layout, QtWidgets.QSplitter): parent.addWidget(layout) else: if isinstance(parent,", "cover return self._layout(QtWidgets.QSplitter, *args, **kwargs) def getSaveFilename(parent, domain, extension): with", "settings.setValue(sname, os.path.dirname(name)) return name if resp == QtWidgets.QMessageBox.No: continue return", "import os import contextlib from PyQt5 import QtCore, QtWidgets from", "extension else '') if name: settings.setValue(sname, os.path.dirname(name)) return name return", "as settings: path = QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'save_%s' % domain", "if extension else '') if name: settings.setValue(sname, os.path.dirname(name)) return name", "domain if settings.contains(sname): path = settings.value(sname) while True: name, dummy", "layout.setContentsMargins(1, 1, 1, 1) layout.setSpacing(1) yield layout @contextlib.contextmanager def vbox(self,", "str): label = QtWidgets.QLabel(label) label.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding) label.setAlignment(QtCore.Qt.AlignVCenter) if widget is", "return None settings.setValue(sname, os.path.dirname(name)) return name def getOpenFilename(parent, domain, extension):", "LayoutBuilder: def __init__(self, target): self.target = target self._stack = []", "path, '*.%s' % extension if extension else '') if name:", "QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.DocumentsLocation) sname = 'open_%s' % domain if settings.contains(sname): path =", "% extension, options=QtWidgets.QFileDialog.DontConfirmOverwrite) if not name: return None if not", "widget) return self._layout(_FormLayout, *args, **kwargs) def split(self, *args, **kwargs): #", "super().__init__(*args, **kwargs) self._enum 
= enum for item in enum: self.addItem(enum.label(item),", "def getSaveFilename(parent, domain, extension): with Settings().grouped('Paths') as settings: path =", "layout.setSpacing(1) yield layout def stack(self, *args, **kwargs): # pragma: no", "return None class EnumComboBox(QtWidgets.QComboBox): valueChanged = QtCore.pyqtSignal(object) def __init__(self, *args,", "finally: self._pop(*args, **kwargs) def _pop(self, *args, **kwargs): layout = self._stack.pop()", "= QtWidgets.QFileDialog.getOpenFileName(parent, _('Open file'), path, '*.%s' % extension if extension", "label.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding) label.setAlignment(QtCore.Qt.AlignVCenter) if widget is None: super().addRow(label) else: super().addRow(label,", "if not name: return None if not name.endswith('.%s' % extension):", "layout: layout.setContentsMargins(1, 1, 1, 1) layout.setSpacing(1) yield layout @contextlib.contextmanager def" ]
[ "' \\ f'{files_receiver_url}/files/add' self.ssh_pool.exec_on_host(host_ip, [cmd]) except: log_print(f'Failed to send report.", "Step(self, host_ip): for line in output_lines: file_name: str for file_name", "module_name # fill new module vars self.module_short_name = self.modules[self.test_module]['module_short_name'] test_module_dir", "fake_init self.test_class = getattr(module, self.test_class_name)(self.config, self.ssh_pool) else: # for process", "= get_configuration_representation(cfg_options, configuration) self.current_test_name = self.current_test_method + configuration_representation else: self.current_test_name", "self.result.start_testcase(self.test_class, self.current_test_name) self.__update_config_and_save(current_method_name=self.current_test_name) # Execute test setup method self.__call_test_setup_teardown('setup') #", "2.0 (the \"License\"); # you may not use this file", "Result from .util import write_yaml_file, should_be_skipped from .logger import *", "> 0: tests_to_execute = sorted(test_plan.tests_to_execute, key=get_priority_key(self.test_class)) log_print(\"*** Found %s tests", "self.__call_test_setup_teardown('setup') # self.__print_with_format() with Step(self, 'Execution'): try: call_method(self.test_class, self.current_test_method) finally:", "%s tests ***\" % ( len(self.total.all_tests), len(self.total.skipped_tests), len(self.total.tests_to_execute) ), color='blue')", "_skip_tests(self): test_plan = self.test_plan[self.test_module] skipped_tests = sorted(test_plan.skipped_tests) try: for current_test", "self.test_plan[self.test_module] for test_method_name in test_method_names: self.__prepare_test_vars(test_method_name, **common_test_param) test_param = {", "- creates instance of test case class resets self.all_tests, self.tests_to_execute,", "if type({}) != type(repeated_test_option): # if option was given as", "= True if hasattr(test_function, \"__skip_conds__\") and \\ len(test_function.__skip_conds__) > 0:", "if 
all_tests[self.current_test_name].get(f'{method_name}_test_method'): method_to_execute = all_tests[self.current_test_name].get(f'{method_name}_test_method') self.__print_with_format(msg=str(method_to_execute.__name__)) try: if all_tests[self.current_test_name].get(f'{method_name}_test_params'): method_to_execute(self.test_class)", "'mute' in attribs: skip_msg = 'skipped cause test is MUTED'", "skip_test, skip_msg, skip_no_start def get_tests_results(self): return self.result def _save_config(self): write_yaml_file(self.config['config_path'],", "self.__prepare_test_vars(**test_param) test_cnt = test_cnt + 1 self.result.start_testcase(self.test_class, self.current_test_name) self.__print_found_test_method_to_execute(long_path_len, test_cnt,", "for test_name in sorted(test_plan.tests_to_execute): test_param = test_plan.all_tests[test_name] self.__prepare_test_vars(**test_param) test_cnt =", "sorted(list(self.gen_tests(self.test_class))) self.create_test_module_attr_yaml(test_method_names) self.collect_tests0(test_method_names) self.total.update(self.test_plan[test_module]) log_print(\"*** Found %s tests. 
%s skipped.", "self.__print_with_format(msg=str(method_to_execute.__name__)) try: if all_tests[self.current_test_name].get(f'{method_name}_test_params'): method_to_execute(self.test_class) else: method_to_execute() except Exception as", "test_exception = e tb_msg = traceback.format_exc() except Exception as e:", "name of the module to prepare :param fake_init: do not", "exec_report: InnerReportConfig = getattr(self.test_class, '_secret_report_storage', None) test_report: InnerReportConfig = getattr(self,", "test_params['setup_test_params'] = True test_params['setup_test_method'] = setup_method # next, teardown fixture", "# don't rename tests when only one iteration requested test_param['repeated_test_name']", "= 1 self.current_test_method = test_method_name if hasattr(self.test_class, '__configurations__'): if cfg_options", "self.current_test_name) self.__update_config_and_save(current_method_name=self.current_test_name) # Execute test setup method self.__call_test_setup_teardown('setup') # self.__print_with_format()", "in sorted(test_plan.tests_to_execute): test_param = test_plan.all_tests[test_name] self.__prepare_test_vars(**test_param) test_cnt = test_cnt +", "\"__known_issues__\"): known_issue = getattr(test_function, \"__known_issues__\") if known_issue: skip_msg = '{}", "import_module(\"suites.%s\" % self.test_module) # used for collect_only if fake_init: self.test_class", "self.all_tests.update(other.all_tests) self.skipped_tests.extend(other.skipped_tests) self.tests_to_execute.extend(other.tests_to_execute) class TidenRunner: # { # '<suite_name>.<test_file_name>': {", "sorted(self.modules.keys()): self.__prepare_module_vars(test_module, fake_init=empty_init) test_plan = self.test_plan[self.test_module] for test_name in sorted(test_plan.tests_to_execute):", "%s\\n%s' % (self.test_module, self.test_class_name, fixture_name, str(e), str(traceback.format_exc())), color='red') finally: self._call_plugin_manager('after_test_class_%s'", "requirements for 
applications for test_module in sorted(self.modules.keys()): module = import_module(\"suites.%s\"", "= True skip_msg = 'skipped due to repeated_test iterations <=", "from .tidenpluginmanager import PluginManager from .report.steps import step, InnerReportConfig, Step,", "# Tiden PluginManager instance pm = None # longest length", "if hasattr(self.test_class, '__configurations__'): if cfg_options is None: cfg_options = getattr(self.test_class,", "setup/teardown fixture. :param fixture_name: either 'setup' or 'teardown' :return: \"\"\"", "collect_test0(self): # collect test params test_params = { 'test_name': self.current_test_name,", "not repeated_test_continue_on_fail: self.result.update_test_name('{}_iteration_{}'.format(current_test, self.test_iteration + 1)) break finally: self.current_test_name =", "tb_msg = traceback.format_exc() except Exception as e: test_status = 'error'", "this magic required to convert decorated test function to method", "teardown function didn't kill nodes if not hasattr(self.test_class, 'keep_ignite_between_tests'): kill_stalled_java(self.ssh_pool)", "= test_function.repeated_test_count repeated_test_name = test_function.repeated_test_name test_params['repeated_test_count'] = repeat_count test_params['repeated_test_name'] =", "in enumerate(tests_to_execute, start=1): test_param = test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) repeated_test_count = test_param.get('repeated_test_count',", "i, cfg_option_name in enumerate(cfg_options) ])), color='blue') else: cfg_options = None", "get_tests_results(self): return self.result def _save_config(self): write_yaml_file(self.config['config_path'], self.config) @staticmethod def gen_tests(test_class):", "key=get_priority_key(self.test_class)) log_print(\"*** Found %s tests in %s. %s skipped. 
Going", "of attrib mismatch' skip_test = True skip_no_start = True if", "test_report.suites = suites setattr(self, '_secret_report_storage', test_report) setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) def", "self.config['rt']['test_module_dir'], self.config['rt']['test_class'], test_dir_name) try: create_remote_dir = [ 'mkdir -p %s/%s/%s'", "self.all_tests, self.tests_to_execute, self.skipped_tests config fills in config['rt'], config['rt']['remote'] Creates test", "as e: test_status = 'error' test_exception = e tb_msg =", "License for the specific language governing permissions and # limitations", "self.current_test_method = None def _run_test(self): setattr(self, '_secret_report_storage', InnerReportConfig()) test_exception =", "module, e.g. test module file name without .py extension module_short_name", "self.test_module) # used for collect_only if fake_init: self.test_class = getattr(module,", "'_secret_report_storage', None) test_report: InnerReportConfig = getattr(self, '_secret_report_storage') idx_to_add = None", "dir(test_class): if class_attr.startswith('test_'): yield class_attr def collect_tests0(self, test_method_names): \"\"\" Collect", "(iteration {} from {})\".format(pad_string, self.test_iteration + 1, repeated_test_count), color='yellow') test_status", "self.current_test_method, lambda: None).__doc__, inner_report_config=getattr(self, '_secret_report_storage')) # Kill java process if", "test_plan.all_tests[test_name] self.__prepare_test_vars(**test_param) test_cnt = test_cnt + 1 self.result.start_testcase(self.test_class, self.current_test_name) self.__print_found_test_method_to_execute(long_path_len,", "def process_tests(self): \"\"\" Run all tests :return: \"\"\" log_print(\"*** Tests", "Execute module setup setup_passed = self.__call_module_setup_teardown('setup') if setup_passed: self._run_tests(tests_to_execute) #", "InnerReportConfig()) test_exception = None tb_msg = None test_status = 'pass'", 
"type(repeated_test_option): # if option was given as --to=repeated_test=N, re-decorate all", "local and remote directories. Copies resources from suite directory to", "idx, test_step in enumerate(test_report.steps): if test_step['status'] is None: idx_to_add =", "'__configuration_options__').copy() configurations = getattr(self.test_class, '__configurations__').copy() for configuration in configurations: #", "was given as --to=repeated_test=N, re-decorate all tests re_decorate = True", "tiden.sshpool import AbstractSshPool self.ssh_pool = AbstractSshPool({'hosts': []}) def empty_init(self, config,", "this is for correct fail in Jenkins if not setup_passed:", "% (self.test_module, self.test_class_name, fixture_name, str(e), str(traceback.format_exc())), color='red') finally: self._call_plugin_manager('after_test_class_%s' %", "the test name long_path_len = 0 # instance of Result", "= getattr(self.test_class, '__configuration_options__') configuration = get_actual_configuration(self.config, cfg_options) log_print(\"Configuration options for", "either 'setup' or 'teardown' :return: \"\"\" self._call_plugin_manager('before_test_class_%s' % fixture_name) fixture_passed", "= {} # == for current test module: # a", "or framework option repeat_count = 1 # here, we check", "tests ***\" % ( len(self.total.all_tests), len(self.total.skipped_tests), len(self.total.tests_to_execute) ), color='blue') test_cnt", "test_status @step('logs') def __save_logs(self): test_dir = self.config.get('rt', {}).get('remote', {}).get('test_dir') if", "copyfile from os.path import join, basename from glob import glob", "known_issue = getattr(test_function, \"__known_issues__\") if known_issue: skip_msg = '{} cause", "TidenTestPlan: all_tests = None skipped_tests = None tests_to_execute = None", "exec_time from .result import Result from .util import write_yaml_file, should_be_skipped", "name long_path_len = 0 # instance of Result class result", "from .logger import * from .runner import 
get_test_modules, get_long_path_len, get_class_from_module,", "decorate test with @repeated_test automagically if that's required if self.config.get('repeated_test'):", "known_issue = getattr(test_function, \"__known_issues__\") test_params['known_issue'] = known_issue # test by", "test_report.steps[idx_to_add]['children'] = exec_report.steps + test_report.steps[idx_to_add].get('children', []) title = getattr(getattr(self.test_class, self.current_test_method),", "1: # don't rename tests when only one iteration requested", "# Copyright 2017-2020 GridGain Systems. # # Licensed under the", "TidenTestPlan() self.__prepare_module_vars(test_module, fake_init=empty_init) self.__print_current_module_name() test_method_names = sorted(list(self.gen_tests(self.test_class))) self.create_test_module_attr_yaml(test_method_names) self.collect_tests0(test_method_names) self.total.update(self.test_plan[test_module])", "is_skipped, skip_msg, skip_no_start = self._check_test_for_skip() test_param.update(self.collect_test0()) repeat_count = test_param.get('repeated_test_count', 1)", "__get_pad_string(self, msg=None): return (\"%s.%s.%s \" % ( self.test_module, self.test_class_name, msg", "def collect_tests0(self, test_method_names): \"\"\" Collect given set of tests from", "= AbstractSshPool({'hosts': []}) def empty_init(self, config, ssh_pool): self.config = config", "self.config.get('attrib') and should_be_skipped(self.config.get('attrib'), attribs, self.config.get('attr_match', 'any')): skip_msg = 'skipped cause", "test module :param test_method_names: :param common_test_param: :return: \"\"\" try: test_plan", "def gen_tests(test_class): \"\"\" Generates all test method of given test", "def create_test_module_attr_yaml(self, test_method_names): # create attr.yaml for current_test_name in test_method_names:", "None tb_msg = None test_status = 'pass' pad_string = self.__get_pad_string()", "set to proper full name of module under 'suites' directory", "# don't forget known issues if 
hasattr(test_function, \"__known_issues__\"): known_issue =", "not hit collect report # Now generate results for 'executed'", "-H \"filename: {send_file_name}\" ' \\ f'-F \"file=@{file_name};filename={file_name}\" ' \\ f'{files_receiver_url}/files/add'", "first setup fixture if hasattr(test_function, \"__setup__\"): setup_fixture = getattr(test_function, \"__setup__\")", "self.__call_test_setup_teardown('teardown') self.pm.do('after_test_method', test_status=test_status, exception=test_exception, stacktrace=tb_msg, known_issue=known_issue, description=getattr(self.test_class, self.current_test_method, lambda: None).__doc__,", "\"\"\" log_print(\"*** Collecting tests ***\", color='blue') long_path_len = get_long_path_len(self.modules) from", "0: tests_to_execute = sorted(test_plan.tests_to_execute, key=get_priority_key(self.test_class)) log_print(\"*** Found %s tests in", "test_status != 'pass': log_print(tb_msg, color='red') log_print(\"{} {} {}{}\".format(pad_string, test_status, exec_time(started),", "= self.modules[self.test_module]['module_short_name'] test_module_dir = \"%s/%s\" % (self.config['suite_var_dir'], self.module_short_name) remote_test_module_dir =", "Tiden config self.config['rt'] = { 'test_class': self.test_class_name, 'test_method': None, 'test_module':", "to 'run' %s tests ***\" % ( len(self.total.all_tests), len(self.total.skipped_tests), len(self.total.tests_to_execute)", "tests - prepare test directory and resources self.__create_test_module_directory(remote_test_module_dir, test_module_dir) self.test_class", "create attr.yaml for current_test_name in test_method_names: test_function = getattr(self.test_class, current_test_name)", "module vars self.module_short_name = self.modules[self.test_module]['module_short_name'] test_module_dir = \"%s/%s\" % (self.config['suite_var_dir'],", "1) if repeat_count > 0: if repeat_count == 1: #", "explicitly by decorator or framework option repeat_count = 1 #", "OF ANY KIND, either express or implied. 
# See the", "\"__known_issues__\") test_params['known_issue'] = known_issue # test by default runs only", "See the License for the specific language governing permissions and", "skip_no_start = False test_function = getattr(self.test_class, self.current_test_method) if hasattr(test_function, \"__attrib__\"):", "self.module_short_name) remote_test_module_dir = \"%s/%s\" % (self.config['remote']['suite_var_dir'], self.module_short_name) self.test_class_name = get_class_from_module(self.module_short_name)", "skip_no_start = True if hasattr(test_function, \"__skipped__\"): skip_msg = 'skipped cause", "self.current_test_method def collect_test0(self): # collect test params test_params = {", "(int(time() - started)), current_method_name=fixture_name) log_print('Exception in %s.%s.%s: %s\\n%s' % (self.test_module,", "to in writing, software # distributed under the License is", "hasattr(test_function, \"__skip_cond__\"): skip_condition = getattr(test_function, \"__skip_cond__\") conditions_met, skip_message = skip_condition(self.config)", "!= type(repeated_test_option): # if option was given as --to=repeated_test=N, re-decorate", "self.test_class - creates instance of test case class resets self.all_tests,", "def __create_test_module_directory(self, remote_test_module_dir, test_module_dir): mkdir(test_module_dir) self.ssh_pool.exec([f'mkdir -p {remote_test_module_dir}']) @step('{method_name}') def", "all_tests.get(test_method) and all_tests[test_method].get('repeated_test_name'): test_dir_name = '{}_{}'.format( test_method_name, all_tests[test_method].get('repeated_test_name')[self.test_iteration]) self.config['rt']['test_method'] =", "test_dir_name ) self.config['rt']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['test_module_dir'], self.config['rt']['test_class'], test_dir_name) try: create_remote_dir", "should_be_skipped from .logger import * from .runner import get_test_modules, get_long_path_len,", "report_config['upload_logs'] else: return if test_dir: try: 
for host_ip, output_lines in", "log_print(\"%s %s\" % (pad_string, test_param['skip_msg']), color='yellow') finally: self.current_test_name = None", "or agreed to in writing, software # distributed under the", "log_print(\"Configuration options for %s:\\n%s\" % (self.test_class.__class__.__name__, '\\n'.join([ '\\t' + cfg_option_name", "python3 # # Copyright 2017-2020 GridGain Systems. # # Licensed", "None) test_report: InnerReportConfig = getattr(self, '_secret_report_storage') idx_to_add = None for", "(pad_string, test_cnt, len(self.total.tests_to_execute)), color='yellow') def __print_with_format(self, msg='', current_method_name=''): if not", "== TidenTestPlan for all modules: total = None # dictionary", "{})\".format(pad_string, self.test_iteration + 1, repeated_test_count), color='yellow') test_status = self._run_test() if", "\"\"\" Run all tests :return: \"\"\" log_print(\"*** Tests ***\", color='blue')", "get_test_modules(config, collect_only=kwargs.get('collect_only')) self.config = config self.long_path_len = get_long_path_len(self.modules) xunit_path_var =", "{}).get('test_dir') if 'WardReport' in self.config.get('plugins', []): report_config = self.config['plugins']['WardReport'] files_receiver_url", "skip_no_start = False if is_skipped: test_param.update({ 'skip_msg': skip_msg, 'skip_no_start': skip_no_start,", "cfg_options, }) test_plan = self.test_plan[self.test_module] if len(test_plan.skipped_tests) > 0: self._skip_tests()", "result = None # current test module, a key to", "Tiden config dictionary config = None # Tiden SshPool instance", "known_issue=known_issue, description=getattr(self.test_class, self.current_test_method, lambda: None).__doc__, inner_report_config=getattr(self, '_secret_report_storage')) # Kill java", "msg=None): return (\"%s.%s.%s \" % ( self.test_module, self.test_class_name, msg if", "True if hasattr(test_function, \"__skip_cond__\"): skip_condition = getattr(test_function, \"__skip_cond__\") conditions_met, skip_message", 
"Copy resources in test resource directory :return: \"\"\" test_resource_dir =", "configuration options current_test_name = None # test method name only", "[]): report_config = self.config['plugins']['WardReport'] files_receiver_url = report_config['files_url'] upload_logs = report_config['upload_logs']", "setup_fixture) else: setup_method = setup_fixture test_params['setup_test_params'] = True test_params['setup_test_method'] =", "compliance with the License. # You may obtain a copy", "self.result = Result(xunit_path=xunit_path_var) self.ssh_pool: SshPool = kwargs.get('ssh_pool') self.pm: PluginManager =", "import path, mkdir from time import time from shutil import", "from .runner import set_configuration_options, get_configuration_representation, get_actual_configuration from importlib import import_module", "f\"{test_resource_dir}/{basename(file)}\") self.config['rt']['test_resource_dir'] = unix_path(test_resource_dir) def __create_test_module_directory(self, remote_test_module_dir, test_module_dir): mkdir(test_module_dir) self.ssh_pool.exec([f'mkdir", "working local and remote directories. 
Copies resources from suite directory", "color='pink') def __copy_resources_to_local_test_module_directory(self): \"\"\" Copy resources in test resource directory", "skip_msg, skip_no_start = self._check_test_for_skip() test_param.update(self.collect_test0()) repeat_count = test_param.get('repeated_test_count', 1) if", "self.__print_with_format() with Step(self, 'Execution'): try: call_method(self.test_class, self.current_test_method) finally: self.__set_child_steps_to_parent() self.__save_logs()", "test_module_dir, 'remote': { 'test_module_dir': remote_test_module_dir, } } module = import_module(\"suites.%s\"", "tests when only one iteration requested test_param['repeated_test_name'] = [] else:", "host_ip): for line in output_lines: file_name: str for file_name in", "tests from test module for all configurations :param test_method_names: :return:", "'configuration': configuration, 'cfg_options': cfg_options, }) test_plan = self.test_plan[self.test_module] if len(test_plan.skipped_tests)", "skipped_tests = None tests_to_execute = None def __init__(self): self.all_tests =", "+ 1 self.result.start_testcase(self.test_class, self.current_test_name) self.__print_found_test_method_to_execute(long_path_len, test_cnt, test_module) self.result.stop_testcase('pass') def process_tests(self):", "not use this file except in compliance with the License.", "self.test_class.__init__ = fake_init self.test_class = getattr(module, self.test_class_name)(self.config, self.ssh_pool) else: #", "pad_string = self.__get_pad_string() started = int(time()) known_issue = self.test_plan[self.test_module].all_tests[self.current_test_name].get('known_issue') setattr(self.test_class,", "original_names = original_test.repeated_test_name decorated_test = repeated_test(repeat_count, test_names=original_names)(original_test.__func__) else: # that's", "logs\\n{traceback.format_exc()}', color='pink') # if exception in setup method then re-raise", "started)), current_method_name=fixture_name) 
log_print('Exception in %s.%s.%s: %s\\n%s' % (self.test_module, self.test_class_name, fixture_name,", "config, # so that test can check options and skip", "you may not use this file except in compliance with", "current_test_name = None # test method name only current_test_method =", "= test_param.get('continue_on_fail') test_with_iterations = True if repeated_test_count > 1 else", "if kwargs.get('xunit_path'): xunit_path_var = kwargs.get('xunit_path') elif config.get('var_dir') and config.get('xunit_file'): xunit_path_var", "self.__prepare_test_vars(**test_param) repeated_test_count = test_param.get('repeated_test_count', 1) repeated_test_continue_on_fail = test_param.get('continue_on_fail') test_with_iterations =", "for idx, test_step in enumerate(test_report.steps): if test_step['status'] is None: idx_to_add", "= known_issue # test by default runs only once, #", "return (\"%s.%s.%s \" % ( self.test_module, self.test_class_name, msg if msg", "not test_method_name: return self.test_iteration = 1 self.current_test_method = test_method_name if", "(pad_string, test_param['skip_msg']), color='yellow') finally: self.current_test_name = None self.current_test_method = None", "that test can check options and skip itself set_configuration_options(cfg_options, self.config,", "if test_dir: try: for host_ip, output_lines in self.ssh_pool.exec([f\"ls {test_dir}\"]).items(): with", "if not setup_passed: exit(1) def create_test_module_attr_yaml(self, test_method_names): # create attr.yaml", "skip itself set_configuration_options(cfg_options, self.config, configuration) self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration, 'cfg_options':", "Execute module teardown self.__call_module_setup_teardown('teardown') # this is for correct fail", "self.test_class.tiden.ssh = self.ssh_pool self.test_class.config = self.config self.test_class.ssh = self.ssh_pool self._save_config()", "known_issue = None if hasattr(test_function, \"__known_issues__\"): 
known_issue = getattr(test_function, \"__known_issues__\")", "once, # unless repeated_test_count set explicitly by decorator or framework", "copyfile(file, f\"{test_resource_dir}/{basename(file)}\") self.config['rt']['test_resource_dir'] = unix_path(test_resource_dir) def __create_test_module_directory(self, remote_test_module_dir, test_module_dir): mkdir(test_module_dir)", "unix_path(test_resource_dir) def __create_test_module_directory(self, remote_test_module_dir, test_module_dir): mkdir(test_module_dir) self.ssh_pool.exec([f'mkdir -p {remote_test_module_dir}']) @step('{method_name}')", "= True repeat_count = int(repeated_test_option) elif self.current_test_method in repeated_test_option.keys(): #", "= sorted(list(self.gen_tests(self.test_class))) self.create_test_module_attr_yaml(test_method_names) self.collect_tests0(test_method_names) self.total.update(self.test_plan[test_module]) log_print(\"*** Found %s tests. %s", "= getattr(self.test_class, self.current_test_method) if hasattr(test_function, 'repeated_test_count'): repeat_count = test_function.repeated_test_count repeated_test_name", "test_function.repeated_test_count repeated_test_name = test_function.repeated_test_name test_params['repeated_test_count'] = repeat_count test_params['repeated_test_name'] = repeated_test_name", "other): self.all_tests.update(other.all_tests) self.skipped_tests.extend(other.skipped_tests) self.tests_to_execute.extend(other.tests_to_execute) class TidenRunner: # { # '<suite_name>.<test_file_name>':", "len(test_plan.skipped_tests), len(test_plan.tests_to_execute), '\\n'.join([ test_plan.all_tests[test_name]['test_method_name'] for test_name in tests_to_execute ])), color='blue')", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "time import time from shutil import copyfile from os.path import", "'repeated_test_name'): # that test was previously decorated by @repeated_test, extract", "cause of repeated_tests decorator if all_tests.get(test_method) and 
all_tests[test_method].get('repeated_test_name'): test_dir_name =", "of the attribute # then skip it. if 'mute' in", "test_param['repeated_test_name'] = [] else: # rare case, skip by --to=repeated_test.test_name=0", "by @repeated_test, extract original test_names original_names = original_test.repeated_test_name decorated_test =", "*args) def __update_config_and_save(self, current_method_name=None): test_method = current_method_name if current_method_name else", "% skip_message skip_test = True if hasattr(test_function, \"__skip_conds__\") and \\", "self.config.get('repeated_test_continue_on_fail', False) return test_params def _skip_tests(self): test_plan = self.test_plan[self.test_module] skipped_tests", "for test_cnt, current_test in enumerate(tests_to_execute, start=1): test_param = test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param)", "is_skipped: test_param.update({ 'skip_msg': skip_msg, 'skip_no_start': skip_no_start, }) test_plan.skipped_tests.append(self.current_test_name) else: if", "if hasattr(original_test, 'repeated_test_name'): # that test was previously decorated by", "for self.test_iteration in range(repeated_test_count): if test_with_iterations: log_print(\"{} started (iteration {}", "skip_condition(self.test_class) if not conditions_met: skip_msg = 'skipped cause of %s'", "InnerReportConfig()) try: self.pm.do(\"before_test_method\", test_module=self.test_module, test_name=self.current_test_name, artifacts=self.config.get('artifacts', {})) self.result.start_testcase(self.test_class, self.current_test_name) self.__update_config_and_save(current_method_name=self.current_test_name)", "e: test_status = 'error' test_exception = e tb_msg = traceback.format_exc()", "fixture_name) return fixture_passed def _call_plugin_manager(self, execution_point): args = [self.test_module, self.test_class]", "common_test_param={ 'configuration': configuration, 'cfg_options': cfg_options, }) def collect_tests1(self, test_method_names, 
common_test_param={}):", "if method_name == 'setup': raise e finally: self.__set_child_steps_to_parent() self._call_plugin_manager(f'after_test_method_{method_name}') def", "self.current_test_method = None self.current_test_name = None def __print_found_test_method_to_execute(self, long_path_len, test_cnt,", "test_module=self.test_module, test_name=self.current_test_name, artifacts=self.config.get('artifacts', {})) self.result.start_testcase(self.test_class, self.current_test_name) self.__update_config_and_save(current_method_name=self.current_test_name) # Execute test", "= self.current_test_method + configuration_representation else: self.current_test_name = self.current_test_method def collect_test0(self):", "one of the attribute # then skip it. if 'mute'", "None self.current_test_name = None def __print_found_test_method_to_execute(self, long_path_len, test_cnt, test_module): method_long_name", "if repeated_test_count > 1 else False pad_string = self.__get_pad_string() log_print(\"%s", "not None: self.modules = kwargs.get('modules') else: self.modules = get_test_modules(config, collect_only=kwargs.get('collect_only'))", "unix_path, call_method, create_case, kill_stalled_java, exec_time from .result import Result from", "self.current_test_method) finally: self.__set_child_steps_to_parent() self.__save_logs() log_print(f\"{pad_string} passed {exec_time(started)}\", color='green') except (AssertionError,", "skip_msg = 'skipped cause of attrib mismatch' skip_test = True", "ssh_pool): self.config = config self.ssh = ssh_pool self.__prepare_session_vars() for test_module", "'test_class': self.test_class_name, 'test_method': None, 'test_module': self.test_module, 'test_module_name': self.module_short_name, 'test_module_dir': test_module_dir,", "self.current_test_method), '__report_title__', None) suites = getattr(getattr(self.test_class, self.current_test_method), '__report_suites__', None) if", "enumerate(tests_to_execute, start=1): test_param = 
test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) repeated_test_count = test_param.get('repeated_test_count', 1)", "of module under 'suites' directory sets up self.test_class_name self.module_short_name self.test_class", "it. if 'mute' in attribs: skip_msg = 'skipped cause test", "self.__prepare_session_vars() for test_module in sorted(self.modules.keys()): # cleanup instance vars self.test_plan[test_module]", "title test_report.suites = suites setattr(self, '_secret_report_storage', test_report) setattr(self.test_class, '_secret_report_storage', InnerReportConfig())", "from traceback import format_exc from .runner import set_configuration_options, get_configuration_representation, get_actual_configuration", "# 'module_short_name': <test_file_name>, # } # } modules = None", "name of module under 'suites' directory sets up self.test_class_name self.module_short_name", "given set of tests from test module for all configurations", "if all_tests.get(test_method) and all_tests[test_method].get('repeated_test_name'): test_dir_name = '{}_{}'.format( test_method_name, all_tests[test_method].get('repeated_test_name')[self.test_iteration]) self.config['rt']['test_method']", "**kwargs): if kwargs.get('modules', None) is not None: self.modules = kwargs.get('modules')", "test method name only current_test_method = None def __init__(self, config,", "\"__skip_cond__\"): skip_condition = getattr(test_function, \"__skip_cond__\") conditions_met, skip_message = skip_condition(self.config) if", "color='blue') test_cnt = 0 # Skipped tests do not hit", "if teardown function didn't kill nodes if not hasattr(self.test_class, 'keep_ignite_between_tests'):", "1 # here, we check --to=repeated_test=N and --to=repeated_test.test_name=N options #", "kwargs.get('plugin_manager') def collect_tests(self): \"\"\" Collect tests from all modules. 
\"\"\"", "test_method test_dir_name = test_method_name all_tests = self.test_plan[self.test_module].all_tests # cause of", "= None # Tiden config dictionary config = None #", "= \"%s/res\" % self.config['rt']['test_module_dir'] if not path.exists(test_resource_dir): mkdir(test_resource_dir) self.config['rt']['resource_dir'] =", "runs only once, # unless repeated_test_count set explicitly by decorator", "self.config['rt']['test_class'], test_dir_name) try: create_remote_dir = [ 'mkdir -p %s/%s/%s' %", "due to repeated_test iterations <= 0' skip_no_start = False if", "% ( len(self.total.all_tests), len(self.total.skipped_tests), len(self.total.tests_to_execute) ), color='blue') test_cnt = 0", "given configuration to Tiden config, # so that test can", "if not conditions_met: skip_msg = 'skipped cause of %s' %", "False test_function = getattr(self.test_class, self.current_test_method) if hasattr(test_function, \"__attrib__\"): attribs =", "issues if hasattr(test_function, \"__known_issues__\"): known_issue = getattr(test_function, \"__known_issues__\") test_params['known_issue'] =", "if common_test_param: test_param.update(common_test_param) test_plan.tests_to_execute.append(self.current_test_name) test_plan.all_tests[self.current_test_name] = test_param.copy() finally: self.current_test_method =", "run %s tests ***\\n%s\" % ( len(test_plan.all_tests), self.test_class_name, len(test_plan.skipped_tests), len(test_plan.tests_to_execute),", "self._check_test_for_skip() test_param.update(self.collect_test0()) repeat_count = test_param.get('repeated_test_count', 1) if repeat_count > 0:", "@step('{method_name}') def __call_test_setup_teardown(self, method_name): method_to_execute = None try: self._call_plugin_manager(f'before_test_method_{method_name}') all_tests", "self.config['rt']['test_class'], test_dir_name ) self.config['rt']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['test_module_dir'], self.config['rt']['test_class'], test_dir_name) try:", "test module: # a 
short name of test module, e.g.", "= self.__call_module_setup_teardown('setup') if setup_passed: self._run_tests(tests_to_execute) # Execute module teardown self.__call_module_setup_teardown('teardown')", "config.get('xunit_file')) self.result = Result(xunit_path=xunit_path_var) self.ssh_pool: SshPool = kwargs.get('ssh_pool') self.pm: PluginManager", "skip_msg = 'skipped cause test is MUTED' known_issue = None", "upload_logs: cmd = f'cd {test_dir}; ' \\ f'curl -H \"filename:", "self.__save_logs() except: log_print(f'Failed to get logs\\n{traceback.format_exc()}', color='pink') # if exception", "is for correct fail in Jenkins if not setup_passed: exit(1)", "method_long_name.ljust(long_path_len, '.') log_print(\"%s found (%s from %s)\" % (pad_string, test_cnt,", "config dictionary config = None # Tiden SshPool instance ssh_pool", "correct fail in Jenkins if not setup_passed: exit(1) def create_test_module_attr_yaml(self,", "len(test_plan.tests_to_execute), '\\n'.join([ test_plan.all_tests[test_name]['test_method_name'] for test_name in tests_to_execute ])), color='blue') #", "fixture_name): started = time() try: self.__print_with_format('started', current_method_name=fixture_name) self.__update_config_and_save(current_method_name=fixture_name) # Execute", "= None skip_no_start = False test_function = getattr(self.test_class, self.current_test_method) if", "as we should fail the test if method_name == 'setup':", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "glob import traceback class TidenTestPlan: all_tests = None skipped_tests =", "= \"{}/{}/{}\".format( self.config['rt']['remote']['test_module_dir'], self.config['rt']['test_class'], test_dir_name ) self.config['rt']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['test_module_dir'],", "= None test_method_names = list(self.gen_tests(self.test_class)) self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration, 'cfg_options':", "basename from glob import glob 
import traceback class TidenTestPlan: all_tests", "directory and resources self.__create_test_module_directory(remote_test_module_dir, test_module_dir) self.test_class = getattr(module, self.test_class_name)(self.config, self.ssh_pool)", "= Result(xunit_path=xunit_path_var) self.ssh_pool: SshPool = kwargs.get('ssh_pool') self.pm: PluginManager = kwargs.get('plugin_manager')", ".py extension module_short_name = None # a name of module'", "call_method, create_case, kill_stalled_java, exec_time from .result import Result from .util", "test if method_name == 'setup': raise e finally: self.__set_child_steps_to_parent() self._call_plugin_manager(f'after_test_method_{method_name}')", "\" % (test_module, self.test_class_name, self.current_test_name) pad_string = method_long_name.ljust(long_path_len, '.') log_print(\"%s", "# Kill java process if teardown function didn't kill nodes", "test_param.update(self.collect_test0()) repeat_count = test_param.get('repeated_test_count', 1) if repeat_count > 0: if", "= '{} cause of {}'.format(skip_msg, known_issue) skip_test = True skip_no_start", "if hasattr(test_function, \"__known_issues__\"): known_issue = getattr(test_function, \"__known_issues__\") test_params['known_issue'] = known_issue", "# Skipped tests do not hit collect report # Now", "except Exception as e: fixture_passed = False self.__print_with_format('failed in %s", "file except in compliance with the License. # You may", "setattr(self, '_secret_report_storage', test_report) setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) def __call_module_setup_teardown(self, fixture_name): \"\"\"", "getattr(test_function, \"__known_issues__\") test_params['known_issue'] = known_issue # test by default runs", "from .report.steps import step, InnerReportConfig, Step, add_attachment, AttachmentType from .util", "limitations under the License. 
from .tidenpluginmanager import PluginManager from .report.steps", "methods: if hasattr(self.test_class, '__configurations__'): cfg_options = getattr(self.test_class, '__configuration_options__') configuration =", "setup setup_passed = self.__call_module_setup_teardown('setup') if setup_passed: self._run_tests(tests_to_execute) # Execute module", "so that test can check options and skip itself set_configuration_options(cfg_options,", "re_decorate: from tiden.util import repeated_test original_test = test_function if hasattr(original_test,", "# a name of module' test class test_class_name = None", "color='red') self.result.stop_testcase( test_status, e=test_exception, tb=tb_msg, known_issue=known_issue, run_info=self.test_class.get_run_info() if hasattr(self.test_class, 'get_run_info')", "tests do not hit collect report # Now generate results", "'test_method': None, 'test_module': self.test_module, 'test_module_name': self.module_short_name, 'test_module_dir': test_module_dir, 'remote': {", "= test_param.get('repeated_test_count', 1) if repeat_count > 0: if repeat_count ==", "self.total = TidenTestPlan() def __prepare_module_vars(self, module_name, fake_init=None): \"\"\" Prepare per-module", "License. 
from .tidenpluginmanager import PluginManager from .report.steps import step, InnerReportConfig,", "log_print(\"[%s][%s]\" % ( datetime.now().isoformat()[11:-7], self.test_module)) def __get_pad_string(self, msg=None): return (\"%s.%s.%s", "else self.current_test_method test_method_name = test_method.split('(')[0] if '(' in test_method else", "# unless repeated_test_count set explicitly by decorator or framework option", "finally: self._call_plugin_manager('after_test_class_%s' % fixture_name) return fixture_passed def _call_plugin_manager(self, execution_point): args", "not conditions_met: skip_msg = 'skipped cause of %s' % skip_message", "finally: self.current_test_method = None self.current_test_name = None def __print_found_test_method_to_execute(self, long_path_len,", "% ( datetime.now().isoformat()[11:-7], self.test_module)) def __get_pad_string(self, msg=None): return (\"%s.%s.%s \"", "else: # for process tests - prepare test directory and", "self.ssh_pool.exec(create_remote_dir) except Exception: log_print(\"Can't create symlink to current test\", color='red')", "e: log_print(f'!!! 
Exception in {method_name} code !!!', color='red') log_print(traceback.format_exc()) try:", "None) suites = getattr(getattr(self.test_class, self.current_test_method), '__report_suites__', None) if title: test_report.title", "finally: self.current_test_name = None self.current_test_method = None def _run_test(self): setattr(self,", "given tests from current test module :param test_method_names: :param common_test_param:", "else: # rare case, skip by --to=repeated_test.test_name=0 is_skipped = True", "self.pm.do('after_test_method', test_status=test_status, exception=test_exception, stacktrace=tb_msg, known_issue=known_issue, description=getattr(self.test_class, self.current_test_method, lambda: None).__doc__, inner_report_config=getattr(self,", "self.pm.do(execution_point, *args) def __update_config_and_save(self, current_method_name=None): test_method = current_method_name if current_method_name", "test_method_names: :param common_test_param: :return: \"\"\" try: test_plan = self.test_plan[self.test_module] for", "( self.test_module, self.test_class_name, msg if msg else self.current_test_method)) \\ .ljust(self.long_path_len,", "= idx break test_report.steps[idx_to_add]['children'] = exec_report.steps + test_report.steps[idx_to_add].get('children', []) title", "case class resets self.all_tests, self.tests_to_execute, self.skipped_tests config fills in config['rt'],", "self.test_class_name) self.test_class.__init__ = fake_init self.test_class = getattr(module, self.test_class_name)(self.config, self.ssh_pool) else:", "under the License. 
from .tidenpluginmanager import PluginManager from .report.steps import", "test_params['repeated_test_name'] = repeated_test_name test_params['continue_on_fail'] = self.config.get('repeated_test_continue_on_fail', False) return test_params def", "setattr(self, '_secret_report_storage', InnerReportConfig()) test_exception = None tb_msg = None test_status", "= 0 # instance of Result class result = None", "self.config['plugins']['WardReport'] files_receiver_url = report_config['files_url'] upload_logs = report_config['upload_logs'] else: return if", "in Jenkins if not setup_passed: exit(1) def create_test_module_attr_yaml(self, test_method_names): #", "for configuration in configurations: # set configuration options from given", "execution_point): args = [self.test_module, self.test_class] if self.current_test_method: args.append(self.current_test_method) self.pm.do(execution_point, *args)", "marked with one of the attribute # then skip it.", "test within module: # test name, with all configuration options", "repeat_count = test_function.repeated_test_count repeated_test_name = test_function.repeated_test_name test_params['repeated_test_count'] = repeat_count test_params['repeated_test_name']", "test_method_names: test_function = getattr(self.test_class, current_test_name) create_case(test_function) def __prepare_session_vars(self): self.test_plan =", "(self.config['rt']['remote']['test_module_dir'], self.test_class_name, str(test_dir_name)), 'ln -sfn %s %s/current_test_directory' % (self.config['rt']['remote']['test_module_dir'], self.config['environment']['home'])", "create_test_module_attr_yaml(self, test_method_names): # create attr.yaml for current_test_name in test_method_names: test_function", "name matches given option re_decorate = True repeat_count = int(repeated_test_option[self.current_test_method])", "all modules. 
\"\"\" log_print(\"*** Collecting tests ***\", color='blue') long_path_len =", "<= 0' skip_no_start = False if is_skipped: test_param.update({ 'skip_msg': skip_msg,", "test_plan = self.test_plan[self.test_module] for test_method_name in test_method_names: self.__prepare_test_vars(test_method_name, **common_test_param) test_param", "test module file name without .py extension module_short_name = None", "tests for test_module in sorted(self.modules.keys()): self.__prepare_module_vars(test_module, fake_init=empty_init) test_plan = self.test_plan[self.test_module]", "test_names original_names = original_test.repeated_test_name decorated_test = repeated_test(repeat_count, test_names=original_names)(original_test.__func__) else: #", "self.test_class_name, self.current_test_name) pad_string = method_long_name.ljust(long_path_len, '.') log_print(\"%s found (%s from", "KIND, either express or implied. # See the License for", "self.current_test_method) # first setup fixture if hasattr(test_function, \"__setup__\"): setup_fixture =", "def __call_test_setup_teardown(self, method_name): method_to_execute = None try: self._call_plugin_manager(f'before_test_method_{method_name}') all_tests =", "self.current_test_name, } test_function = getattr(self.test_class, self.current_test_method) # first setup fixture", "lambda: None).__doc__, inner_report_config=getattr(self, '_secret_report_storage')) # Kill java process if teardown", "repeated_tests decorator if all_tests.get(test_method) and all_tests[test_method].get('repeated_test_name'): test_dir_name = '{}_{}'.format( test_method_name,", "hasattr(test_function, 'repeated_test_count'): repeat_count = test_function.repeated_test_count repeated_test_name = test_function.repeated_test_name test_params['repeated_test_count'] =", "# Execute test teardown method self.__call_test_setup_teardown('teardown') self.pm.do('after_test_method', test_status=test_status, exception=test_exception, stacktrace=tb_msg,", "import copyfile from os.path 
import join, basename from glob import", "= get_actual_configuration(self.config, cfg_options) configuration_representation = get_configuration_representation(cfg_options, configuration) self.current_test_name = self.current_test_method", "exit(1) def create_test_module_attr_yaml(self, test_method_names): # create attr.yaml for current_test_name in", "skip_message=test_param['skip_msg'], skip_no_start=test_param['skip_no_start']) self.result.update_xunit() log_print(\"%s %s\" % (pad_string, test_param['skip_msg']), color='yellow') finally:", "add_attachment, AttachmentType from .util import log_print, unix_path, call_method, create_case, kill_stalled_java,", "Collecting tests ***\", color='blue') long_path_len = get_long_path_len(self.modules) from tiden.sshpool import", "(the \"License\"); # you may not use this file except", "int(time()) known_issue = self.test_plan[self.test_module].all_tests[self.current_test_name].get('known_issue') setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) try: self.pm.do(\"before_test_method\", test_module=self.test_module,", "e=test_exception, tb=tb_msg, known_issue=known_issue, run_info=self.test_class.get_run_info() if hasattr(self.test_class, 'get_run_info') else None )", "test_method = current_method_name if current_method_name else self.current_test_method test_method_name = test_method.split('(')[0]", "dictionary of TidenTestPlan indexed by test module name test_plan =", "sets up self.test_class_name self.module_short_name self.test_class - creates instance of test", "= getattr(module, self.test_class_name)(self.config, self.ssh_pool) if hasattr(self.test_class, 'tiden'): self.__copy_resources_to_local_test_module_directory() # Set", "attribs.append(str(self.current_test_method)) # if attr is passed to runner and test", "configuration_representation else: self.current_test_name = self.current_test_method def collect_test0(self): # collect test", "\"__known_issues__\"): known_issue = getattr(test_function, 
\"__known_issues__\") test_params['known_issue'] = known_issue # test", "convert decorated test function to method of a test class", "{exec_time(started)}\", color='green') except (AssertionError, TidenException) as e: test_status = 'fail'", "suite directory to local test module working directory. :param module_name:", "started = int(time()) known_issue = self.test_plan[self.test_module].all_tests[self.current_test_name].get('known_issue') setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) try:", "'__configurations__').copy() for configuration in configurations: # set configuration options from", "# # Unless required by applicable law or agreed to", "def empty_init(self, config, ssh_pool): self.config = config self.ssh = ssh_pool", "in %s. %s skipped. Going to run %s tests ***\\n%s\"", "to convert decorated test function to method of a test", "'_secret_report_storage')) # Kill java process if teardown function didn't kill", "module, a key to self.modules dictionary test_module = None #", "(test_module, self.test_class_name, self.current_test_name) pad_string = method_long_name.ljust(long_path_len, '.') log_print(\"%s found (%s", "self.config['rt']['remote']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['remote']['test_module_dir'], self.config['rt']['test_class'], test_dir_name ) self.config['rt']['test_dir'] = \"{}/{}/{}\".format(", "class resets self.all_tests, self.tests_to_execute, self.skipped_tests config fills in config['rt'], config['rt']['remote']", "from .result import Result from .util import write_yaml_file, should_be_skipped from", "class_attr.startswith('test_'): yield class_attr def collect_tests0(self, test_method_names): \"\"\" Collect given set", "current test module: # a short name of test module,", "getattr(module, test_class_name)(self.config, self.ssh_pool) if hasattr(test_class, 'check_requirements'): test_class.check_requirements() for test_module in", "passed to runner and test is not marked with one", "False 
skip_msg = None skip_no_start = False test_function = getattr(self.test_class,", "repeated_test_count > 1 else False pad_string = self.__get_pad_string() log_print(\"%s started", "self.config = config self.long_path_len = get_long_path_len(self.modules) xunit_path_var = None if", "call_method(self.test_class, fixture_name) self.__print_with_format('finished in %s sec' % (int(time() - started)),", "in config['rt'], config['rt']['remote'] Creates test module working local and remote", "a key to self.modules dictionary test_module = None # ==", "test_module) self.result.stop_testcase('pass') def process_tests(self): \"\"\" Run all tests :return: \"\"\"", "class TidenTestPlan: all_tests = None skipped_tests = None tests_to_execute =", "implied. # See the License for the specific language governing", "self.__prepare_module_vars(test_module, fake_init=empty_init) test_plan = self.test_plan[self.test_module] for test_name in sorted(test_plan.tests_to_execute): test_param", "log_print(\"{} {} {}{}\".format(pad_string, test_status, exec_time(started), known_issue_str(known_issue)), color='red') self.result.stop_testcase( test_status, e=test_exception,", "color='yellow') def __print_with_format(self, msg='', current_method_name=''): if not current_method_name: if self.current_test_method:", "self.current_test_method, MethodType(decorated_test, self.test_class)) test_function = getattr(self.test_class, self.current_test_method) if hasattr(test_function, 'repeated_test_count'):", "all_tests[self.current_test_name].get(f'{method_name}_test_method'): method_to_execute = all_tests[self.current_test_name].get(f'{method_name}_test_method') self.__print_with_format(msg=str(method_to_execute.__name__)) try: if all_tests[self.current_test_name].get(f'{method_name}_test_params'): method_to_execute(self.test_class) else:", "- prepare test directory and resources self.__create_test_module_directory(remote_test_module_dir, test_module_dir) self.test_class =", "= 
self._check_test_for_skip() test_param.update(self.collect_test0()) repeat_count = test_param.get('repeated_test_count', 1) if repeat_count >", "# and decorate test with @repeated_test automagically if that's required", "test_names=original_names)(original_test.__func__) else: # that's a brand new decoration decorated_test =", "= [] else: # rare case, skip by --to=repeated_test.test_name=0 is_skipped", "found (%s from %s)\" % (pad_string, test_cnt, len(self.total.tests_to_execute)), color='yellow') def", "self.collect_tests1(test_method_names) else: cfg_options = getattr(self.test_class, '__configuration_options__').copy() configurations = getattr(self.test_class, '__configurations__').copy()", "if attr is passed to runner and test is not", "% self.config['rt']['test_module_dir'] if not path.exists(test_resource_dir): mkdir(test_resource_dir) self.config['rt']['resource_dir'] = \"%s/res/%s\" %", "type(''): setup_method = getattr(self.test_class, setup_fixture) else: setup_method = setup_fixture test_params['setup_test_params']", "config = None # Tiden SshPool instance ssh_pool = None", "_save_config(self): write_yaml_file(self.config['config_path'], self.config) @staticmethod def gen_tests(test_class): \"\"\" Generates all test", "> 0: skip_conditions = test_function.__skip_conds__ for skip_condition in skip_conditions: conditions_met,", "for collect_only if fake_init: self.test_class = getattr(module, self.test_class_name) self.test_class.__init__ =", "None self.current_test_method = None def _run_tests(self, tests_to_execute): test_plan = self.test_plan[self.test_module]", "itself set_configuration_options(cfg_options, self.config, configuration) self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration, 'cfg_options': cfg_options,", "])), color='blue') else: cfg_options = None configuration = None test_method_names", "test method of given test class :param test_class: :return: \"\"\"", "class from types import MethodType 
setattr(self.test_class, self.current_test_method, MethodType(decorated_test, self.test_class)) test_function", "self.ssh_pool = AbstractSshPool({'hosts': []}) def empty_init(self, config, ssh_pool): self.config =", "as e: except Exception as e: fixture_passed = False self.__print_with_format('failed", "if kwargs.get('modules', None) is not None: self.modules = kwargs.get('modules') else:", "directory :return: \"\"\" test_resource_dir = \"%s/res\" % self.config['rt']['test_module_dir'] if not", "log_print(f'!!! Exception in {method_name} code !!!', color='red') log_print(traceback.format_exc()) try: self.__save_logs()", "if hasattr(test_function, \"__skip_conds__\") and \\ len(test_function.__skip_conds__) > 0: skip_conditions =", "def _check_test_for_skip(self): attribs = [] skip_test = False skip_msg =", "# Update Tiden config self.config['rt'] = { 'test_class': self.test_class_name, 'test_method':", "fake_init: self.test_class = getattr(module, self.test_class_name) self.test_class.__init__ = fake_init self.test_class =", "tests in %s. %s skipped. 
Going to run %s tests", "= [] skip_test = False skip_msg = None skip_no_start =", "if '(' in test_method else test_method test_dir_name = test_method_name all_tests", ".runner import get_test_modules, get_long_path_len, get_class_from_module, known_issue_str from .priority_decorator import get_priority_key", "test_method else test_method test_dir_name = test_method_name all_tests = self.test_plan[self.test_module].all_tests #", "color='blue') long_path_len = get_long_path_len(self.modules) from tiden.sshpool import AbstractSshPool self.ssh_pool =", "= False skip_msg = None skip_no_start = False test_function =", "self.__create_test_module_directory(remote_test_module_dir, test_module_dir) self.test_class = getattr(module, self.test_class_name)(self.config, self.ssh_pool) if hasattr(self.test_class, 'tiden'):", "self.test_class_name, str(test_dir_name)), 'ln -sfn %s %s/current_test_directory' % (self.config['rt']['remote']['test_module_dir'], self.config['environment']['home']) ]", "module name test_plan = {} # == for current test", "then re-raise the exception as we should fail the test", "test_cnt, test_module) self.result.stop_testcase('pass') def process_tests(self): \"\"\" Run all tests :return:", "configurations: # set configuration options from given configuration to Tiden", "hasattr(self.test_class, 'keep_ignite_between_tests'): kill_stalled_java(self.ssh_pool) return test_status @step('logs') def __save_logs(self): test_dir =", "import time from shutil import copyfile from os.path import join,", "line in output_lines: file_name: str for file_name in line.split('\\n'): if", "self.__update_config_and_save(current_method_name=fixture_name) # Execute setup or teardown method call_method(self.test_class, fixture_name) self.__print_with_format('finished", "Unless required by applicable law or agreed to in writing,", "None # a name of module' test class test_class_name =", "= get_class_from_module(self.modules[test_module]['module_short_name']) test_class = 
getattr(module, test_class_name)(self.config, self.ssh_pool) if hasattr(test_class, 'check_requirements'):", "skip_no_start def get_tests_results(self): return self.result def _save_config(self): write_yaml_file(self.config['config_path'], self.config) @staticmethod", "__save_logs(self): test_dir = self.config.get('rt', {}).get('remote', {}).get('test_dir') if 'WardReport' in self.config.get('plugins',", "])), color='blue') # Execute module setup setup_passed = self.__call_module_setup_teardown('setup') if", "Exception in {method_name} code !!!', color='red') log_print(traceback.format_exc()) try: self.__save_logs() except:", "method of a test class from types import MethodType setattr(self.test_class,", "for skip_condition in skip_conditions: conditions_met, skip_message = skip_condition(self.test_class) if not", "if hasattr(test_function, \"__skip_cond__\"): skip_condition = getattr(test_function, \"__skip_cond__\") conditions_met, skip_message =", "the specific language governing permissions and # limitations under the", "and decorate test with @repeated_test automagically if that's required if", "# Execute setup or teardown method call_method(self.test_class, fixture_name) self.__print_with_format('finished in", "and --to=repeated_test.test_name=N options # and decorate test with @repeated_test automagically", "decoration decorated_test = repeated_test(repeat_count)(original_test.__func__) # this magic required to convert", "from test module for all configurations :param test_method_names: :return: \"\"\"", "= { 'test_method_name': test_method_name, } is_skipped, skip_msg, skip_no_start = self._check_test_for_skip()", "self.ssh_pool) if hasattr(test_class, 'check_requirements'): test_class.check_requirements() for test_module in sorted(self.modules.keys()): #", "\"\"\" Copy resources in test resource directory :return: \"\"\" test_resource_dir", "else test_method test_dir_name = test_method_name all_tests = self.test_plan[self.test_module].all_tests # cause", 
"setup_fixture test_params['setup_test_params'] = True test_params['setup_test_method'] = setup_method # next, teardown", "if type(setup_fixture) == type(''): setup_method = getattr(self.test_class, setup_fixture) else: setup_method", "skip_conditions: conditions_met, skip_message = skip_condition(self.test_class) if not conditions_met: skip_msg =", "and file_name.endswith('.log'): send_file_name = f'{uuid4()}_{file_name}' add_attachment(self, file_name, send_file_name, AttachmentType.FILE) if", "for test_name in tests_to_execute ])), color='blue') # Execute module setup", "(AssertionError, TidenException) as e: except Exception as e: fixture_passed =", "finally: self.__set_child_steps_to_parent() self.__save_logs() log_print(f\"{pad_string} passed {exec_time(started)}\", color='green') except (AssertionError, TidenException)", "test module, e.g. test module file name without .py extension", "configuration, 'cfg_options': cfg_options, }) test_plan = self.test_plan[self.test_module] if len(test_plan.skipped_tests) >", "otherwise re-decorate only if test name matches given option re_decorate", "= self.test_plan[self.test_module] if len(test_plan.skipped_tests) > 0: self._skip_tests() if len(test_plan.tests_to_execute) >", "setup or teardown method call_method(self.test_class, fixture_name) self.__print_with_format('finished in %s sec'", "test_param = test_plan.all_tests[test_name] self.__prepare_test_vars(**test_param) test_cnt = test_cnt + 1 self.result.start_testcase(self.test_class,", "skipped. 
Going to 'run' %s tests ***\" % ( len(self.total.all_tests),", "hasattr(test_class, 'check_requirements'): test_class.check_requirements() for test_module in sorted(self.modules.keys()): # cleanup instance", "1) repeated_test_continue_on_fail = test_param.get('continue_on_fail') test_with_iterations = True if repeated_test_count >", "# cleanup instance vars self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module, fake_init=empty_init) self.__print_current_module_name()", "in range(repeated_test_count): if test_with_iterations: log_print(\"{} started (iteration {} from {})\".format(pad_string,", "= current_method_name if current_method_name else self.current_test_method test_method_name = test_method.split('(')[0] if", ":return: \"\"\" try: test_plan = self.test_plan[self.test_module] for test_method_name in test_method_names:", "fixture_name: either 'setup' or 'teardown' :return: \"\"\" self._call_plugin_manager('before_test_class_%s' % fixture_name)", "traceback.format_exc() except Exception as e: test_status = 'error' test_exception =", "test_module in sorted(self.modules.keys()): self.__prepare_module_vars(test_module, fake_init=empty_init) test_plan = self.test_plan[self.test_module] for test_name", "test_module in sorted(self.modules.keys()): module = import_module(\"suites.%s\" % test_module) test_class_name =", "getattr(self.test_class, self.current_test_method) if hasattr(test_function, 'repeated_test_count'): repeat_count = test_function.repeated_test_count repeated_test_name =", "getattr(self.test_class, '__configuration_options__').copy() configurations = getattr(self.test_class, '__configurations__').copy() for configuration in configurations:", "= self.test_plan[self.test_module].all_tests[self.current_test_name].get('known_issue') setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) try: self.pm.do(\"before_test_method\", test_module=self.test_module, test_name=self.current_test_name, 
artifacts=self.config.get('artifacts',", "if not hasattr(self.test_class, 'keep_ignite_between_tests'): kill_stalled_java(self.ssh_pool) return test_status @step('logs') def __save_logs(self):", "config.get('var_dir') and config.get('xunit_file'): xunit_path_var = join(config.get('var_dir'), config.get('xunit_file')) self.result = Result(xunit_path=xunit_path_var)", "= e tb_msg = traceback.format_exc() finally: if test_status != 'pass':", "test_param = test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) repeated_test_count = test_param.get('repeated_test_count', 1) repeated_test_continue_on_fail =", "= int(repeated_test_option) elif self.current_test_method in repeated_test_option.keys(): # otherwise re-decorate only", "None: configuration = get_actual_configuration(self.config, cfg_options) configuration_representation = get_configuration_representation(cfg_options, configuration) self.current_test_name", "fill new module vars self.module_short_name = self.modules[self.test_module]['module_short_name'] test_module_dir = \"%s/%s\"", "'{}_{}'.format( test_method_name, all_tests[test_method].get('repeated_test_name')[self.test_iteration]) self.config['rt']['test_method'] = test_method_name self.config['rt']['remote']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['remote']['test_module_dir'],", "= 'error' test_exception = e tb_msg = traceback.format_exc() finally: if", "} modules = None # Tiden config dictionary config =", "unless repeated_test_count set explicitly by decorator or framework option repeat_count", "= getattr(test_function, \"__attrib__\") attribs.append(str(self.current_test_method)) # if attr is passed to", "name only current_test_method = None def __init__(self, config, **kwargs): if", "function to method of a test class from types import", ":param fixture_name: either 'setup' or 'teardown' :return: \"\"\" self._call_plugin_manager('before_test_class_%s' %", "for test_method_name in test_method_names: 
self.__prepare_test_vars(test_method_name, **common_test_param) test_param = { 'test_method_name':", "= self.ssh_pool self.test_class.config = self.config self.test_class.ssh = self.ssh_pool self._save_config() def", "mkdir(test_resource_dir) self.config['rt']['resource_dir'] = \"%s/res/%s\" % (self.config['suite_dir'], self.module_short_name[5:]) for file in", "Jenkins if not setup_passed: exit(1) def create_test_module_attr_yaml(self, test_method_names): # create", "= \"%s/res/%s\" % (self.config['suite_dir'], self.module_short_name[5:]) for file in glob(\"%s/*\" %", "cfg_options) configuration_representation = get_configuration_representation(cfg_options, configuration) self.current_test_name = self.current_test_method + configuration_representation", "can check options and skip itself set_configuration_options(cfg_options, self.config, configuration) self.collect_tests1(test_method_names,", "self.result.start_testcase(self.test_class, self.current_test_name) self.__print_found_test_method_to_execute(long_path_len, test_cnt, test_module) self.result.stop_testcase('pass') def process_tests(self): \"\"\" Run", "repeated_test_option = self.config['repeated_test'] re_decorate = False if type({}) != type(repeated_test_option):", "None test_method_names = list(self.gen_tests(self.test_class)) self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration, 'cfg_options': cfg_options,", "def update(self, other): self.all_tests.update(other.all_tests) self.skipped_tests.extend(other.skipped_tests) self.tests_to_execute.extend(other.tests_to_execute) class TidenRunner: # {", "framework option repeat_count = 1 # here, we check --to=repeated_test=N", "_call_plugin_manager(self, execution_point): args = [self.test_module, self.test_class] if self.current_test_method: args.append(self.current_test_method) self.pm.do(execution_point,", "%s)\" % (pad_string, test_cnt, len(tests_to_execute)), color='yellow') for self.test_iteration in 
range(repeated_test_count):", "test_param['skip_msg']), color='yellow') finally: self.current_test_name = None self.current_test_method = None def", "known_issue_str(known_issue)), color='red') self.result.stop_testcase( test_status, e=test_exception, tb=tb_msg, known_issue=known_issue, run_info=self.test_class.get_run_info() if hasattr(self.test_class,", "return self.result def _save_config(self): write_yaml_file(self.config['config_path'], self.config) @staticmethod def gen_tests(test_class): \"\"\"", "getattr(self, '_secret_report_storage') idx_to_add = None for idx, test_step in enumerate(test_report.steps):", "'_secret_report_storage', InnerReportConfig()) try: self.pm.do(\"before_test_method\", test_module=self.test_module, test_name=self.current_test_name, artifacts=self.config.get('artifacts', {})) self.result.start_testcase(self.test_class, self.current_test_name)", "* from .runner import get_test_modules, get_long_path_len, get_class_from_module, known_issue_str from .priority_decorator", "only one iteration requested test_param['repeated_test_name'] = [] else: # rare", "test_function = getattr(self.test_class, self.current_test_method) if hasattr(test_function, \"__attrib__\"): attribs = getattr(test_function,", "test_method_names): \"\"\" Collect given set of tests from test module", "% test_function.__skipped_message__ skip_test = True if hasattr(test_function, \"__skip_cond__\"): skip_condition =", "= fake_init self.test_class = getattr(module, self.test_class_name)(self.config, self.ssh_pool) else: # for", "# instance of Result class result = None # current", "kwargs.get('modules') else: self.modules = get_test_modules(config, collect_only=kwargs.get('collect_only')) self.config = config self.long_path_len", "in enumerate(cfg_options) ])), color='blue') else: cfg_options = None configuration =", "self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module, fake_init=empty_init) self.__print_current_module_name() 
test_method_names = sorted(list(self.gen_tests(self.test_class))) self.create_test_module_attr_yaml(test_method_names)", "self.tests_to_execute, self.skipped_tests config fills in config['rt'], config['rt']['remote'] Creates test module", "up self.test_class_name self.module_short_name self.test_class - creates instance of test case", "skip_test = False skip_msg = None skip_no_start = False test_function", "config fills in config['rt'], config['rt']['remote'] Creates test module working local", "import_module(\"suites.%s\" % test_module) test_class_name = get_class_from_module(self.modules[test_module]['module_short_name']) test_class = getattr(module, test_class_name)(self.config,", "MUTED' known_issue = None if hasattr(test_function, \"__known_issues__\"): known_issue = getattr(test_function,", "current_method_name else self.current_test_method test_method_name = test_method.split('(')[0] if '(' in test_method", "%s tests. %s skipped. Going to 'run' %s tests ***\"", "within module: # test name, with all configuration options current_test_name", "cfg_options is None: cfg_options = getattr(self.test_class, '__configuration_options__') if configuration is", "f'{files_receiver_url}/files/add' self.ssh_pool.exec_on_host(host_ip, [cmd]) except: log_print(f'Failed to send report. 
\\n{format_exc()}', color='pink')", "test_with_iterations: log_print(\"{} started (iteration {} from {})\".format(pad_string, self.test_iteration + 1,", "!= 'pass' and not repeated_test_continue_on_fail: self.result.update_test_name('{}_iteration_{}'.format(current_test, self.test_iteration + 1)) break", "'executed' tests for test_module in sorted(self.modules.keys()): self.__prepare_module_vars(test_module, fake_init=empty_init) test_plan =", "%s %s/current_test_directory' % (self.config['rt']['remote']['test_module_dir'], self.config['environment']['home']) ] self.ssh_pool.exec(create_remote_dir) except Exception: log_print(\"Can't", "full name of module under 'suites' directory sets up self.test_class_name", "test_method_names): # create attr.yaml for current_test_name in test_method_names: test_function =", "in configurations: # set configuration options from given configuration to", "# set configuration options from given configuration to Tiden config,", "\"__attrib__\") attribs.append(str(self.current_test_method)) # if attr is passed to runner and", "%s skipped. Going to run %s tests ***\\n%s\" % (", "else: cfg_options = None configuration = None test_method_names = list(self.gen_tests(self.test_class))", "e.g. 
test module file name without .py extension module_short_name =", "= test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) pad_string = self.__get_pad_string(msg=self.current_test_method) self.result.skip_testcase_no_start(self.test_class, self.current_test_name, skip_message=test_param['skip_msg'], skip_no_start=test_param['skip_no_start'])", "of %s' % test_function.__skipped_message__ skip_test = True if hasattr(test_function, \"__skip_cond__\"):", "] self.ssh_pool.exec(create_remote_dir) except Exception: log_print(\"Can't create symlink to current test\",", "+ str( configuration[i]) for i, cfg_option_name in enumerate(cfg_options) ])), color='blue')", "to prepare :param fake_init: do not init module :return: \"\"\"", "self.test_plan[self.test_module].all_tests[self.current_test_name].get('known_issue') setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) try: self.pm.do(\"before_test_method\", test_module=self.test_module, test_name=self.current_test_name, artifacts=self.config.get('artifacts', {}))", "try: self.__print_with_format('started', current_method_name=fixture_name) self.__update_config_and_save(current_method_name=fixture_name) # Execute setup or teardown method", "== 1: # don't rename tests when only one iteration", "skip_msg, 'skip_no_start': skip_no_start, }) test_plan.skipped_tests.append(self.current_test_name) else: if common_test_param: test_param.update(common_test_param) test_plan.tests_to_execute.append(self.current_test_name)", "# dictionary of TidenTestPlan indexed by test module name test_plan", "= self.config.get('rt', {}).get('remote', {}).get('test_dir') if 'WardReport' in self.config.get('plugins', []): report_config", "remote directories. 
Copies resources from suite directory to local test", "repeated_test_name = test_function.repeated_test_name test_params['repeated_test_count'] = repeat_count test_params['repeated_test_name'] = repeated_test_name test_params['continue_on_fail']", "attribute # then skip it. if 'mute' in attribs: skip_msg", "function didn't kill nodes if not hasattr(self.test_class, 'keep_ignite_between_tests'): kill_stalled_java(self.ssh_pool) return", "all_tests[self.current_test_name].get(f'{method_name}_test_params'): method_to_execute(self.test_class) else: method_to_execute() except Exception as e: log_print(f'!!! Exception", "hasattr(test_function, \"__teardown__\"): teardown_fixture = getattr(test_function, \"__teardown__\") teardown_method = getattr(self.test_class, teardown_fixture)", "# Now generate results for 'executed' tests for test_module in", "options for %s:\\n%s\" % (self.test_class.__class__.__name__, '\\n'.join([ '\\t' + cfg_option_name +", "config.get('xunit_file'): xunit_path_var = join(config.get('var_dir'), config.get('xunit_file')) self.result = Result(xunit_path=xunit_path_var) self.ssh_pool: SshPool", "= self._run_test() if test_with_iterations and test_status != 'pass' and not", "uuid4 from traceback import format_exc from .runner import set_configuration_options, get_configuration_representation,", "get_class_from_module(self.module_short_name) # Update Tiden config self.config['rt'] = { 'test_class': self.test_class_name,", "test_dir_name = '{}_{}'.format( test_method_name, all_tests[test_method].get('repeated_test_name')[self.test_iteration]) self.config['rt']['test_method'] = test_method_name self.config['rt']['remote']['test_dir'] =", "{remote_test_module_dir}']) @step('{method_name}') def __call_test_setup_teardown(self, method_name): method_to_execute = None try: self._call_plugin_manager(f'before_test_method_{method_name}')", "None def __init__(self): self.all_tests = {} self.skipped_tests = [] self.tests_to_execute", 
"current_method_name=fixture_name) # except (AssertionError, TidenException) as e: except Exception as", "You may obtain a copy of the License at #", "import AbstractSshPool self.ssh_pool = AbstractSshPool({'hosts': []}) def empty_init(self, config, ssh_pool):", "not init module :return: \"\"\" self.test_module = module_name # fill", "try: if hasattr(self.test_class, fixture_name): started = time() try: self.__print_with_format('started', current_method_name=fixture_name)", "[ 'mkdir -p %s/%s/%s' % (self.config['rt']['remote']['test_module_dir'], self.test_class_name, str(test_dir_name)), 'ln -sfn", "= repeated_test_name test_params['continue_on_fail'] = self.config.get('repeated_test_continue_on_fail', False) return test_params def _skip_tests(self):", "class test_class_name = None # instance of current module' test", "empty_init(self, config, ssh_pool): self.config = config self.ssh = ssh_pool self.__prepare_session_vars()", "= self.config self.test_class.ssh = self.ssh_pool self._save_config() def __prepare_test_vars(self, test_method_name=None, configuration=None,", "{ 'test_method_name': test_method_name, } is_skipped, skip_msg, skip_no_start = self._check_test_for_skip() test_param.update(self.collect_test0())", "= None def __print_found_test_method_to_execute(self, long_path_len, test_cnt, test_module): method_long_name = \"%s.%s.%s", "enumerate(cfg_options) ])), color='blue') else: cfg_options = None configuration = None", "given option re_decorate = True repeat_count = int(repeated_test_option[self.current_test_method]) if re_decorate:", "repeated_test iterations <= 0' skip_no_start = False if is_skipped: test_param.update({", "'test_method_name': test_method_name, } is_skipped, skip_msg, skip_no_start = self._check_test_for_skip() test_param.update(self.collect_test0()) repeat_count", "# cleanup instance vars self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module) # find", "current_method_name = self.current_test_method else: 
current_method_name = '' log_print(\"[{}][.{}.{}] {}\".format( datetime.now().isoformat()[11:-7],", "module teardown self.__call_module_setup_teardown('teardown') # this is for correct fail in", "self.result.stop_testcase('pass') def process_tests(self): \"\"\" Run all tests :return: \"\"\" log_print(\"***", "in %s.%s.%s: %s\\n%s' % (self.test_module, self.test_class_name, fixture_name, str(e), str(traceback.format_exc())), color='red')", "> 1 else False pad_string = self.__get_pad_string() log_print(\"%s started (%s", "test_cnt = 0 # Skipped tests do not hit collect", "self._run_tests(tests_to_execute) # Execute module teardown self.__call_module_setup_teardown('teardown') # this is for", "def _skip_tests(self): test_plan = self.test_plan[self.test_module] skipped_tests = sorted(test_plan.skipped_tests) try: for", "test is not marked with one of the attribute #", "in sorted(self.modules.keys()): # cleanup instance vars self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module,", "except (AssertionError, TidenException) as e: except Exception as e: fixture_passed", "self.ssh_pool: SshPool = kwargs.get('ssh_pool') self.pm: PluginManager = kwargs.get('plugin_manager') def collect_tests(self):", "color='green') except (AssertionError, TidenException) as e: test_status = 'fail' test_exception", "to current test\", color='red') self._save_config() def _check_test_for_skip(self): attribs = []", "report. 
\\n{format_exc()}', color='pink') def __copy_resources_to_local_test_module_directory(self): \"\"\" Copy resources in test", "'remote': { 'test_module_dir': remote_test_module_dir, } } module = import_module(\"suites.%s\" %", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "decorated_test = repeated_test(repeat_count, test_names=original_names)(original_test.__func__) else: # that's a brand new", "import import_module from os import path, mkdir from time import", "be set to proper full name of module under 'suites'", "'pass' and not repeated_test_continue_on_fail: self.result.update_test_name('{}_iteration_{}'.format(current_test, self.test_iteration + 1)) break finally:", "test_with_iterations and test_status != 'pass' and not repeated_test_continue_on_fail: self.result.update_test_name('{}_iteration_{}'.format(current_test, self.test_iteration", "ssh_pool = None # Tiden PluginManager instance pm = None", "if hasattr(test_function, \"__setup__\"): setup_fixture = getattr(test_function, \"__setup__\") if type(setup_fixture) ==", "log_print(\"Can't create symlink to current test\", color='red') self._save_config() def _check_test_for_skip(self):", "set configuration options from given configuration to Tiden config, #", "of test module, e.g. test module file name without .py", "repeated_test_option.keys(): # otherwise re-decorate only if test name matches given", "= f'{uuid4()}_{file_name}' add_attachment(self, file_name, send_file_name, AttachmentType.FILE) if upload_logs: cmd =", "if self.current_test_method: current_method_name = self.current_test_method else: current_method_name = '' log_print(\"[{}][.{}.{}]", "instance vars self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module) # find test methods:", "language governing permissions and # limitations under the License. 
from", "options # and decorate test with @repeated_test automagically if that's", "skip_msg = 'skipped cause of %s' % skip_message skip_test =", "test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) repeated_test_count = test_param.get('repeated_test_count', 1) repeated_test_continue_on_fail = test_param.get('continue_on_fail') test_with_iterations", "self.__call_module_setup_teardown('teardown') # this is for correct fail in Jenkins if", "skip_message skip_test = True return skip_test, skip_msg, skip_no_start def get_tests_results(self):", "in {method_name} code !!!', color='red') log_print(traceback.format_exc()) try: self.__save_logs() except: log_print(f'Failed", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "(self.config['suite_dir'], self.module_short_name[5:]) for file in glob(\"%s/*\" % self.config['rt']['resource_dir']): if path.isfile(file):", "'configuration': configuration, 'cfg_options': cfg_options, }) def collect_tests1(self, test_method_names, common_test_param={}): \"\"\"", "License. 
# You may obtain a copy of the License", "e tb_msg = traceback.format_exc() finally: if test_status != 'pass': log_print(tb_msg,", "repeated_test_count), color='yellow') test_status = self._run_test() if test_with_iterations and test_status !=", "skip_msg, skip_no_start def get_tests_results(self): return self.result def _save_config(self): write_yaml_file(self.config['config_path'], self.config)", ".result import Result from .util import write_yaml_file, should_be_skipped from .logger", "traceback class TidenTestPlan: all_tests = None skipped_tests = None tests_to_execute", "None # instance of current module' test case class test_class", "= import_module(\"suites.%s\" % self.test_module) # used for collect_only if fake_init:", "if not path.exists(test_resource_dir): mkdir(test_resource_dir) self.config['rt']['resource_dir'] = \"%s/res/%s\" % (self.config['suite_dir'], self.module_short_name[5:])", "\"%s/res\" % self.config['rt']['test_module_dir'] if not path.exists(test_resource_dir): mkdir(test_resource_dir) self.config['rt']['resource_dir'] = \"%s/res/%s\"", "hasattr(test_function, \"__skipped__\"): skip_msg = 'skipped cause of %s' % test_function.__skipped_message__", "get_test_modules, get_long_path_len, get_class_from_module, known_issue_str from .priority_decorator import get_priority_key from .sshpool", "self.__get_pad_string() started = int(time()) known_issue = self.test_plan[self.test_module].all_tests[self.current_test_name].get('known_issue') setattr(self.test_class, '_secret_report_storage', InnerReportConfig())", "getattr(self.test_class, '__configuration_options__') configuration = get_actual_configuration(self.config, cfg_options) log_print(\"Configuration options for %s:\\n%s\"", "= join(config.get('var_dir'), config.get('xunit_file')) self.result = Result(xunit_path=xunit_path_var) self.ssh_pool: SshPool = kwargs.get('ssh_pool')", "test function to method of a test class from types", "test_plan = self.test_plan[self.test_module] for test_name in 
sorted(test_plan.tests_to_execute): test_param = test_plan.all_tests[test_name]", "sorted(self.modules.keys()): # cleanup instance vars self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module) #", "all_tests = None skipped_tests = None tests_to_execute = None def", "path, mkdir from time import time from shutil import copyfile", "self.result.update_xunit() log_print(\"%s %s\" % (pad_string, test_param['skip_msg']), color='yellow') finally: self.current_test_name =", "def _run_tests(self, tests_to_execute): test_plan = self.test_plan[self.test_module] try: for test_cnt, current_test", "= None # a name of module' test class test_class_name", "= kwargs.get('plugin_manager') def collect_tests(self): \"\"\" Collect tests from all modules.", "get_long_path_len(self.modules) from tiden.sshpool import AbstractSshPool self.ssh_pool = AbstractSshPool({'hosts': []}) def", "= getattr(self.test_class, '__configuration_options__') if configuration is None: configuration = get_actual_configuration(self.config,", "setup_method # next, teardown fixture if hasattr(test_function, \"__teardown__\"): teardown_fixture =", "test_method_names = sorted(list(self.gen_tests(self.test_class))) self.create_test_module_attr_yaml(test_method_names) self.collect_tests0(test_method_names) self.total.update(self.test_plan[test_module]) log_print(\"*** Found %s tests.", "Kill java process if teardown function didn't kill nodes if", "as e: log_print(f'!!! 
Exception in {method_name} code !!!', color='red') log_print(traceback.format_exc())", "color='red') finally: self._call_plugin_manager('after_test_class_%s' % fixture_name) return fixture_passed def _call_plugin_manager(self, execution_point):", "# so that test can check options and skip itself", "None # test method name only current_test_method = None def", "'pass' pad_string = self.__get_pad_string() started = int(time()) known_issue = self.test_plan[self.test_module].all_tests[self.current_test_name].get('known_issue')", "prepare :param fake_init: do not init module :return: \"\"\" self.test_module", "file_name, send_file_name, AttachmentType.FILE) if upload_logs: cmd = f'cd {test_dir}; '", "permissions and # limitations under the License. from .tidenpluginmanager import", "if test_with_iterations: log_print(\"{} started (iteration {} from {})\".format(pad_string, self.test_iteration +", "+ test_report.steps[idx_to_add].get('children', []) title = getattr(getattr(self.test_class, self.current_test_method), '__report_title__', None) suites", "log_print(\"*** Tests ***\", color='blue') self.__prepare_session_vars() # Check requirements for applications", "Exception as e: log_print(f'!!! 
Exception in {method_name} code !!!', color='red')", "True repeat_count = int(repeated_test_option[self.current_test_method]) if re_decorate: from tiden.util import repeated_test", "test with @repeated_test automagically if that's required if self.config.get('repeated_test'): repeated_test_option", "all tests re_decorate = True repeat_count = int(repeated_test_option) elif self.current_test_method", "(self.test_module, self.test_class_name, fixture_name, str(e), str(traceback.format_exc())), color='red') finally: self._call_plugin_manager('after_test_class_%s' % fixture_name)", "self.current_test_name = self.current_test_method def collect_test0(self): # collect test params test_params", "if 'WardReport' in self.config.get('plugins', []): report_config = self.config['plugins']['WardReport'] files_receiver_url =", "= self.__get_pad_string() log_print(\"%s started (%s from %s)\" % (pad_string, test_cnt,", "TidenTestPlan for all modules: total = None # dictionary of", "}) test_plan = self.test_plan[self.test_module] if len(test_plan.skipped_tests) > 0: self._skip_tests() if", "# Execute module setup setup_passed = self.__call_module_setup_teardown('setup') if setup_passed: self._run_tests(tests_to_execute)", "<reponame>mshonichev/example_pkg<filename>src/tiden/tidenrunner.py #!/usr/bin/env python3 # # Copyright 2017-2020 GridGain Systems. 
#", "set explicitly by decorator or framework option repeat_count = 1", "is_skipped = True skip_msg = 'skipped due to repeated_test iterations", "= \"%s/%s\" % (self.config['remote']['suite_var_dir'], self.module_short_name) self.test_class_name = get_class_from_module(self.module_short_name) # Update", "test_function.__skip_conds__ for skip_condition in skip_conditions: conditions_met, skip_message = skip_condition(self.test_class) if", "getattr(test_function, \"__skip_cond__\") conditions_met, skip_message = skip_condition(self.config) if not conditions_met: skip_msg", "self.test_module = module_name # fill new module vars self.module_short_name =", "the test if method_name == 'setup': raise e finally: self.__set_child_steps_to_parent()", "get_actual_configuration from importlib import import_module from os import path, mkdir", "% ( len(test_plan.all_tests), self.test_class_name, len(test_plan.skipped_tests), len(test_plan.tests_to_execute), '\\n'.join([ test_plan.all_tests[test_name]['test_method_name'] for test_name", ":return: \"\"\" test_resource_dir = \"%s/res\" % self.config['rt']['test_module_dir'] if not path.exists(test_resource_dir):", "} is_skipped, skip_msg, skip_no_start = self._check_test_for_skip() test_param.update(self.collect_test0()) repeat_count = test_param.get('repeated_test_count',", "current_method_name if current_method_name else self.current_test_method test_method_name = test_method.split('(')[0] if '('", "self.pm: PluginManager = kwargs.get('plugin_manager') def collect_tests(self): \"\"\" Collect tests from", "create_case(test_function) def __prepare_session_vars(self): self.test_plan = {} self.total = TidenTestPlan() def", "runner and test is not marked with one of the", "import set_configuration_options, get_configuration_representation, get_actual_configuration from importlib import import_module from os", "skip_msg = None skip_no_start = False test_function = getattr(self.test_class, self.current_test_method)", "WARRANTIES OR 
CONDITIONS OF ANY KIND, either express or implied.", "vars self.module_short_name = self.modules[self.test_module]['module_short_name'] test_module_dir = \"%s/%s\" % (self.config['suite_var_dir'], self.module_short_name)", "= [self.test_module, self.test_class] if self.current_test_method: args.append(self.current_test_method) self.pm.do(execution_point, *args) def __update_config_and_save(self,", "PluginManager = kwargs.get('plugin_manager') def collect_tests(self): \"\"\" Collect tests from all", "in skip_conditions: conditions_met, skip_message = skip_condition(self.test_class) if not conditions_met: skip_msg", "magic required to convert decorated test function to method of", "method_name): method_to_execute = None try: self._call_plugin_manager(f'before_test_method_{method_name}') all_tests = self.test_plan[self.test_module].all_tests if", "None: idx_to_add = idx break test_report.steps[idx_to_add]['children'] = exec_report.steps + test_report.steps[idx_to_add].get('children',", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "hasattr(self.test_class, 'tiden'): self.__copy_resources_to_local_test_module_directory() # Set ssh and config apps model", "test_param.copy() finally: self.current_test_method = None self.current_test_name = None def __print_found_test_method_to_execute(self,", "# == for current test module: # a short name", "required by applicable law or agreed to in writing, software", "try: create_remote_dir = [ 'mkdir -p %s/%s/%s' % (self.config['rt']['remote']['test_module_dir'], self.test_class_name,", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "if exception in setup method then re-raise the exception as", "# rare case, skip by --to=repeated_test.test_name=0 is_skipped = True skip_msg", "if test_status != 'pass': log_print(tb_msg, color='red') log_print(\"{} {} {}{}\".format(pad_string, test_status,", 
"test_module_dir): mkdir(test_module_dir) self.ssh_pool.exec([f'mkdir -p {remote_test_module_dir}']) @step('{method_name}') def __call_test_setup_teardown(self, method_name): method_to_execute", "tests ***\", color='blue') long_path_len = get_long_path_len(self.modules) from tiden.sshpool import AbstractSshPool", "repeated_test_continue_on_fail: self.result.update_test_name('{}_iteration_{}'.format(current_test, self.test_iteration + 1)) break finally: self.current_test_name = None", "= get_actual_configuration(self.config, cfg_options) log_print(\"Configuration options for %s:\\n%s\" % (self.test_class.__class__.__name__, '\\n'.join([", "Exception: log_print(\"Can't create symlink to current test\", color='red') self._save_config() def", "classes self.test_class.tiden.config = self.config self.test_class.tiden.ssh = self.ssh_pool self.test_class.config = self.config", "run_info=self.test_class.get_run_info() if hasattr(self.test_class, 'get_run_info') else None ) # Execute test", "cfg_options = None configuration = None test_method_names = list(self.gen_tests(self.test_class)) self.collect_tests1(test_method_names,", "\"file=@{file_name};filename={file_name}\" ' \\ f'{files_receiver_url}/files/add' self.ssh_pool.exec_on_host(host_ip, [cmd]) except: log_print(f'Failed to send", "path.isfile(file): copyfile(file, f\"{test_resource_dir}/{basename(file)}\") self.config['rt']['test_resource_dir'] = unix_path(test_resource_dir) def __create_test_module_directory(self, remote_test_module_dir, test_module_dir):", "\"\"\" for class_attr in dir(test_class): if class_attr.startswith('test_'): yield class_attr def", "nodes if not hasattr(self.test_class, 'keep_ignite_between_tests'): kill_stalled_java(self.ssh_pool) return test_status @step('logs') def", "agreed to in writing, software # distributed under the License", "cfg_option_name in enumerate(cfg_options) ])), color='blue') else: cfg_options = None configuration", "skip_msg = '{} cause of {}'.format(skip_msg, 
known_issue) skip_test = True", "distributed under the License is distributed on an \"AS IS\"", "'tiden'): self.__copy_resources_to_local_test_module_directory() # Set ssh and config apps model classes", "break finally: self.current_test_name = None self.current_test_method = None def _run_test(self):", "if not current_method_name: if self.current_test_method: current_method_name = self.current_test_method else: current_method_name", "'path': <full-path-to-test-file>, # 'module_short_name': <test_file_name>, # } # } modules", "teardown fixture if hasattr(test_function, \"__teardown__\"): teardown_fixture = getattr(test_function, \"__teardown__\") teardown_method", "color='red') log_print(\"{} {} {}{}\".format(pad_string, test_status, exec_time(started), known_issue_str(known_issue)), color='red') self.result.stop_testcase( test_status,", "% fixture_name) return fixture_passed def _call_plugin_manager(self, execution_point): args = [self.test_module,", "if hasattr(test_function, \"__known_issues__\"): known_issue = getattr(test_function, \"__known_issues__\") if known_issue: skip_msg", "from .util import write_yaml_file, should_be_skipped from .logger import * from", "fixture_passed = False self.__print_with_format('failed in %s sec' % (int(time() -", "= None # Tiden SshPool instance ssh_pool = None #", "for current test within module: # test name, with all", "cleanup instance vars self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module, fake_init=empty_init) self.__print_current_module_name() test_method_names", "current test\", color='red') self._save_config() def _check_test_for_skip(self): attribs = [] skip_test", "# Tiden SshPool instance ssh_pool = None # Tiden PluginManager", "if len(test_plan.skipped_tests) > 0: self._skip_tests() if len(test_plan.tests_to_execute) > 0: tests_to_execute", "True if repeated_test_count > 1 else False pad_string = self.__get_pad_string()", "current_method_name = '' log_print(\"[{}][.{}.{}] 
{}\".format( datetime.now().isoformat()[11:-7], self.test_class_name, current_method_name, msg)) def", "tb=tb_msg, known_issue=known_issue, run_info=self.test_class.get_run_info() if hasattr(self.test_class, 'get_run_info') else None ) #", ":param common_test_param: :return: \"\"\" try: test_plan = self.test_plan[self.test_module] for test_method_name", "self.test_class.ssh = self.ssh_pool self._save_config() def __prepare_test_vars(self, test_method_name=None, configuration=None, cfg_options=None, **kwargs):", "collect_only=kwargs.get('collect_only')) self.config = config self.long_path_len = get_long_path_len(self.modules) xunit_path_var = None", "% (pad_string, test_param['skip_msg']), color='yellow') finally: self.current_test_name = None self.current_test_method =", "None # current test module, a key to self.modules dictionary", "= skip_condition(self.test_class) if not conditions_met: skip_msg = 'skipped cause of", "self.test_iteration = 1 self.current_test_method = test_method_name if hasattr(self.test_class, '__configurations__'): if", "getattr(self.test_class, teardown_fixture) test_params['teardown_test_method'] = teardown_method # don't forget known issues", "InnerReportConfig = getattr(self, '_secret_report_storage') idx_to_add = None for idx, test_step", "= self.__get_pad_string() started = int(time()) known_issue = self.test_plan[self.test_module].all_tests[self.current_test_name].get('known_issue') setattr(self.test_class, '_secret_report_storage',", "test resource directory :return: \"\"\" test_resource_dir = \"%s/res\" % self.config['rt']['test_module_dir']", "test_exception = e tb_msg = traceback.format_exc() finally: if test_status !=", "cause of attrib mismatch' skip_test = True skip_no_start = True", "next, teardown fixture if hasattr(test_function, \"__teardown__\"): teardown_fixture = getattr(test_function, \"__teardown__\")", "= setup_fixture test_params['setup_test_params'] = True test_params['setup_test_method'] = setup_method # next,", 
"None) if title: test_report.title = title test_report.suites = suites setattr(self,", "% (int(time() - started)), current_method_name=fixture_name) log_print('Exception in %s.%s.%s: %s\\n%s' %", "= True if hasattr(test_function, \"__skipped__\"): skip_msg = 'skipped cause of", "tests_to_execute = None def __init__(self): self.all_tests = {} self.skipped_tests =", "time from shutil import copyfile from os.path import join, basename", "is not None: self.modules = kwargs.get('modules') else: self.modules = get_test_modules(config,", "try: if all_tests[self.current_test_name].get(f'{method_name}_test_params'): method_to_execute(self.test_class) else: method_to_execute() except Exception as e:", "= getattr(test_function, \"__setup__\") if type(setup_fixture) == type(''): setup_method = getattr(self.test_class,", "None for idx, test_step in enumerate(test_report.steps): if test_step['status'] is None:", "= getattr(getattr(self.test_class, self.current_test_method), '__report_title__', None) suites = getattr(getattr(self.test_class, self.current_test_method), '__report_suites__',", "len(tests_to_execute)), color='yellow') for self.test_iteration in range(repeated_test_count): if test_with_iterations: log_print(\"{} started", "instance of Result class result = None # current test", "test_module = None # == TidenTestPlan for all modules: total", "in tests_to_execute ])), color='blue') # Execute module setup setup_passed =", "ssh and config apps model classes self.test_class.tiden.config = self.config self.test_class.tiden.ssh", "None: cfg_options = getattr(self.test_class, '__configuration_options__') if configuration is None: configuration", "import SshPool from uuid import uuid4 from traceback import format_exc", "except (AssertionError, TidenException) as e: test_status = 'fail' test_exception =", "[] skip_test = False skip_msg = None skip_no_start = False", "ssh_pool self.__prepare_session_vars() for test_module in sorted(self.modules.keys()): # cleanup instance vars", 
"self.test_class_name, fixture_name, str(e), str(traceback.format_exc())), color='red') finally: self._call_plugin_manager('after_test_class_%s' % fixture_name) return", "log_print(\"*** Found %s tests. %s skipped. Going to 'run' %s", "= None tb_msg = None test_status = 'pass' pad_string =", "None tests_to_execute = None def __init__(self): self.all_tests = {} self.skipped_tests", "self.test_class = getattr(module, self.test_class_name)(self.config, self.ssh_pool) if hasattr(self.test_class, 'tiden'): self.__copy_resources_to_local_test_module_directory() #", "self.test_module)) def __get_pad_string(self, msg=None): return (\"%s.%s.%s \" % ( self.test_module,", "self._call_plugin_manager(f'before_test_method_{method_name}') all_tests = self.test_plan[self.test_module].all_tests if all_tests[self.current_test_name].get(f'{method_name}_test_method'): method_to_execute = all_tests[self.current_test_name].get(f'{method_name}_test_method') self.__print_with_format(msg=str(method_to_execute.__name__))", "OR CONDITIONS OF ANY KIND, either express or implied. #", "do not init module :return: \"\"\" self.test_module = module_name #", "= False self.__print_with_format('failed in %s sec' % (int(time() - started)),", "% fixture_name) fixture_passed = True try: if hasattr(self.test_class, fixture_name): started", "setattr(self.test_class, self.current_test_method, MethodType(decorated_test, self.test_class)) test_function = getattr(self.test_class, self.current_test_method) if hasattr(test_function,", "the License is distributed on an \"AS IS\" BASIS, #", "# if option was given as --to=repeated_test=N, re-decorate all tests", "and config apps model classes self.test_class.tiden.config = self.config self.test_class.tiden.ssh =", "config, ssh_pool): self.config = config self.ssh = ssh_pool self.__prepare_session_vars() for", "test_class = None # == for current test within module:", "name of test module, e.g. 
test module file name without", "test_cnt, current_test in enumerate(tests_to_execute, start=1): test_param = test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) repeated_test_count", "MethodType(decorated_test, self.test_class)) test_function = getattr(self.test_class, self.current_test_method) if hasattr(test_function, 'repeated_test_count'): repeat_count", "title = getattr(getattr(self.test_class, self.current_test_method), '__report_title__', None) suites = getattr(getattr(self.test_class, self.current_test_method),", "= self.current_test_method def collect_test0(self): # collect test params test_params =", "log_print(f'Failed to get logs\\n{traceback.format_exc()}', color='pink') # if exception in setup", "options and skip itself set_configuration_options(cfg_options, self.config, configuration) self.collect_tests1(test_method_names, common_test_param={ 'configuration':", "len(test_plan.tests_to_execute) > 0: tests_to_execute = sorted(test_plan.tests_to_execute, key=get_priority_key(self.test_class)) log_print(\"*** Found %s", "else: current_method_name = '' log_print(\"[{}][.{}.{}] {}\".format( datetime.now().isoformat()[11:-7], self.test_class_name, current_method_name, msg))", "report # Now generate results for 'executed' tests for test_module", "fake_init=empty_init) self.__print_current_module_name() test_method_names = sorted(list(self.gen_tests(self.test_class))) self.create_test_module_attr_yaml(test_method_names) self.collect_tests0(test_method_names) self.total.update(self.test_plan[test_module]) log_print(\"*** Found", "tests. %s skipped. 
Going to 'run' %s tests ***\" %", "% ( self.test_module, self.test_class_name, msg if msg else self.current_test_method)) \\", "from types import MethodType setattr(self.test_class, self.current_test_method, MethodType(decorated_test, self.test_class)) test_function =", "= module_name # fill new module vars self.module_short_name = self.modules[self.test_module]['module_short_name']", "None skip_no_start = False test_function = getattr(self.test_class, self.current_test_method) if hasattr(test_function,", "= int(repeated_test_option[self.current_test_method]) if re_decorate: from tiden.util import repeated_test original_test =", "sorted(test_plan.tests_to_execute, key=get_priority_key(self.test_class)) log_print(\"*** Found %s tests in %s. %s skipped.", "= test_method_name all_tests = self.test_plan[self.test_module].all_tests # cause of repeated_tests decorator", "def _run_test(self): setattr(self, '_secret_report_storage', InnerReportConfig()) test_exception = None tb_msg =", "cause of %s' % test_function.__skipped_message__ skip_test = True if hasattr(test_function,", "sorted(self.modules.keys()): module = import_module(\"suites.%s\" % test_module) test_class_name = get_class_from_module(self.modules[test_module]['module_short_name']) test_class", "law or agreed to in writing, software # distributed under", "= getattr(module, self.test_class_name) self.test_class.__init__ = fake_init self.test_class = getattr(module, self.test_class_name)(self.config,", "os import path, mkdir from time import time from shutil", "of internal variables: Expects self.test_module be set to proper full", "%s skipped. 
Going to 'run' %s tests ***\" % (", "of tests from test module for all configurations :param test_method_names:", "'\\t' + cfg_option_name + '=' + str( configuration[i]) for i,", "self.__get_pad_string(msg=self.current_test_method) self.result.skip_testcase_no_start(self.test_class, self.current_test_name, skip_message=test_param['skip_msg'], skip_no_start=test_param['skip_no_start']) self.result.update_xunit() log_print(\"%s %s\" % (pad_string,", "self.module_short_name = self.modules[self.test_module]['module_short_name'] test_module_dir = \"%s/%s\" % (self.config['suite_var_dir'], self.module_short_name) remote_test_module_dir", "self.tests_to_execute = [] def update(self, other): self.all_tests.update(other.all_tests) self.skipped_tests.extend(other.skipped_tests) self.tests_to_execute.extend(other.tests_to_execute) class", "getattr(module, self.test_class_name) self.test_class.__init__ = fake_init self.test_class = getattr(module, self.test_class_name)(self.config, self.ssh_pool)", "= '{}_{}'.format( test_method_name, all_tests[test_method].get('repeated_test_name')[self.test_iteration]) self.config['rt']['test_method'] = test_method_name self.config['rt']['remote']['test_dir'] = \"{}/{}/{}\".format(", "%s' % test_function.__skipped_message__ skip_test = True if hasattr(test_function, \"__skip_cond__\"): skip_condition", "self.test_plan[self.test_module] for test_name in sorted(test_plan.tests_to_execute): test_param = test_plan.all_tests[test_name] self.__prepare_test_vars(**test_param) test_cnt", "method of given test class :param test_class: :return: \"\"\" for", "resources self.__create_test_module_directory(remote_test_module_dir, test_module_dir) self.test_class = getattr(module, self.test_class_name)(self.config, self.ssh_pool) if hasattr(self.test_class,", "else: setup_method = setup_fixture test_params['setup_test_params'] = True test_params['setup_test_method'] = setup_method", "test_status != 'pass' and not repeated_test_continue_on_fail: 
self.result.update_test_name('{}_iteration_{}'.format(current_test, self.test_iteration + 1))", "add_attachment(self, file_name, send_file_name, AttachmentType.FILE) if upload_logs: cmd = f'cd {test_dir};", "re-decorate only if test name matches given option re_decorate =", "%s\" % (pad_string, test_param['skip_msg']), color='yellow') finally: self.current_test_name = None self.current_test_method", "dictionary test_module = None # == TidenTestPlan for all modules:", ".util import log_print, unix_path, call_method, create_case, kill_stalled_java, exec_time from .result", "True elif self.config.get('attrib') and should_be_skipped(self.config.get('attrib'), attribs, self.config.get('attr_match', 'any')): skip_msg =", "self.test_iteration + 1, repeated_test_count), color='yellow') test_status = self._run_test() if test_with_iterations", "resources in test resource directory :return: \"\"\" test_resource_dir = \"%s/res\"", "don't rename tests when only one iteration requested test_param['repeated_test_name'] =", "file in glob(\"%s/*\" % self.config['rt']['resource_dir']): if path.isfile(file): copyfile(file, f\"{test_resource_dir}/{basename(file)}\") self.config['rt']['test_resource_dir']", "test class from types import MethodType setattr(self.test_class, self.current_test_method, MethodType(decorated_test, self.test_class))", "may obtain a copy of the License at # #", ".util import write_yaml_file, should_be_skipped from .logger import * from .runner", "or teardown method call_method(self.test_class, fixture_name) self.__print_with_format('finished in %s sec' %", "of %s' % skip_message skip_test = True if hasattr(test_function, \"__skip_conds__\")", "test_cnt, test_module): method_long_name = \"%s.%s.%s \" % (test_module, self.test_class_name, self.current_test_name)", "Run all tests :return: \"\"\" log_print(\"*** Tests ***\", color='blue') self.__prepare_session_vars()", "return if test_dir: try: for host_ip, output_lines in self.ssh_pool.exec([f\"ls 
{test_dir}\"]).items():", "\"__known_issues__\") if known_issue: skip_msg = '{} cause of {}'.format(skip_msg, known_issue)", "\\ f'{files_receiver_url}/files/add' self.ssh_pool.exec_on_host(host_ip, [cmd]) except: log_print(f'Failed to send report. \\n{format_exc()}',", "= None if kwargs.get('xunit_path'): xunit_path_var = kwargs.get('xunit_path') elif config.get('var_dir') and", "in test_method else test_method test_dir_name = test_method_name all_tests = self.test_plan[self.test_module].all_tests", "= 'skipped cause of attrib mismatch' skip_test = True skip_no_start", "method_to_execute = all_tests[self.current_test_name].get(f'{method_name}_test_method') self.__print_with_format(msg=str(method_to_execute.__name__)) try: if all_tests[self.current_test_name].get(f'{method_name}_test_params'): method_to_execute(self.test_class) else: method_to_execute()", ":param module_name: name of the module to prepare :param fake_init:", "current_method_name, msg)) def __print_current_module_name(self): log_print(\"[%s][%s]\" % ( datetime.now().isoformat()[11:-7], self.test_module)) def", "may not use this file except in compliance with the", "import get_test_modules, get_long_path_len, get_class_from_module, known_issue_str from .priority_decorator import get_priority_key from", "= ssh_pool self.__prepare_session_vars() for test_module in sorted(self.modules.keys()): # cleanup instance", "= test_cnt + 1 self.result.start_testcase(self.test_class, self.current_test_name) self.__print_found_test_method_to_execute(long_path_len, test_cnt, test_module) self.result.stop_testcase('pass')", "we check --to=repeated_test=N and --to=repeated_test.test_name=N options # and decorate test", "test_status = 'error' test_exception = e tb_msg = traceback.format_exc() finally:", "line.split('\\n'): if file_name and file_name.endswith('.log'): send_file_name = f'{uuid4()}_{file_name}' add_attachment(self, file_name,", "pad_string = method_long_name.ljust(long_path_len, '.') log_print(\"%s found 
(%s from %s)\" %", "def __set_child_steps_to_parent(self): exec_report: InnerReportConfig = getattr(self.test_class, '_secret_report_storage', None) test_report: InnerReportConfig", "from tiden.sshpool import AbstractSshPool self.ssh_pool = AbstractSshPool({'hosts': []}) def empty_init(self,", "self.config['rt']['resource_dir']): if path.isfile(file): copyfile(file, f\"{test_resource_dir}/{basename(file)}\") self.config['rt']['test_resource_dir'] = unix_path(test_resource_dir) def __create_test_module_directory(self,", "test_cnt = test_cnt + 1 self.result.start_testcase(self.test_class, self.current_test_name) self.__print_found_test_method_to_execute(long_path_len, test_cnt, test_module)", "method_name == 'setup': raise e finally: self.__set_child_steps_to_parent() self._call_plugin_manager(f'after_test_method_{method_name}') def __set_child_steps_to_parent(self):", "if not hasattr(self.test_class, '__configurations__'): self.collect_tests1(test_method_names) else: cfg_options = getattr(self.test_class, '__configuration_options__').copy()", "this file except in compliance with the License. 
# You", "configuration, 'cfg_options': cfg_options, }) def collect_tests1(self, test_method_names, common_test_param={}): \"\"\" Collect", "decorator if all_tests.get(test_method) and all_tests[test_method].get('repeated_test_name'): test_dir_name = '{}_{}'.format( test_method_name, all_tests[test_method].get('repeated_test_name')[self.test_iteration])", "test_method_name=None, configuration=None, cfg_options=None, **kwargs): if not test_method_name: return self.test_iteration =", "self.test_iteration + 1)) break finally: self.current_test_name = None self.current_test_method =", "= getattr(self.test_class, setup_fixture) else: setup_method = setup_fixture test_params['setup_test_params'] = True", "# that's a brand new decoration decorated_test = repeated_test(repeat_count)(original_test.__func__) #", "test_name=self.current_test_name, artifacts=self.config.get('artifacts', {})) self.result.start_testcase(self.test_class, self.current_test_name) self.__update_config_and_save(current_method_name=self.current_test_name) # Execute test setup", "# fill new module vars self.module_short_name = self.modules[self.test_module]['module_short_name'] test_module_dir =", "mkdir(test_module_dir) self.ssh_pool.exec([f'mkdir -p {remote_test_module_dir}']) @step('{method_name}') def __call_test_setup_teardown(self, method_name): method_to_execute =", "don't forget known issues if hasattr(test_function, \"__known_issues__\"): known_issue = getattr(test_function,", "write_yaml_file, should_be_skipped from .logger import * from .runner import get_test_modules,", "format_exc from .runner import set_configuration_options, get_configuration_representation, get_actual_configuration from importlib import", "# # Licensed under the Apache License, Version 2.0 (the", "common_test_param={ 'configuration': configuration, 'cfg_options': cfg_options, }) test_plan = self.test_plan[self.test_module] if", "for all modules: total = None # dictionary of TidenTestPlan", "test class test_class_name = 
None # instance of current module'", "self.test_class_name)(self.config, self.ssh_pool) if hasattr(self.test_class, 'tiden'): self.__copy_resources_to_local_test_module_directory() # Set ssh and", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "artifacts=self.config.get('artifacts', {})) self.result.start_testcase(self.test_class, self.current_test_name) self.__update_config_and_save(current_method_name=self.current_test_name) # Execute test setup method", "!= 'pass': log_print(tb_msg, color='red') log_print(\"{} {} {}{}\".format(pad_string, test_status, exec_time(started), known_issue_str(known_issue)),", "repeat_count = int(repeated_test_option[self.current_test_method]) if re_decorate: from tiden.util import repeated_test original_test", "f'curl -H \"filename: {send_file_name}\" ' \\ f'-F \"file=@{file_name};filename={file_name}\" ' \\", "color='pink') # if exception in setup method then re-raise the", "try: test_plan = self.test_plan[self.test_module] for test_method_name in test_method_names: self.__prepare_test_vars(test_method_name, **common_test_param)", "%s' % skip_message skip_test = True if hasattr(test_function, \"__skip_conds__\") and", "started (iteration {} from {})\".format(pad_string, self.test_iteration + 1, repeated_test_count), color='yellow')", "= test_method.split('(')[0] if '(' in test_method else test_method test_dir_name =", "requested test_param['repeated_test_name'] = [] else: # rare case, skip by", "not marked with one of the attribute # then skip", "{}).get('remote', {}).get('test_dir') if 'WardReport' in self.config.get('plugins', []): report_config = self.config['plugins']['WardReport']", "configuration to Tiden config, # so that test can check", "test module name test_plan = {} # == for current", "# '<suite_name>.<test_file_name>': { # 'path': <full-path-to-test-file>, # 'module_short_name': <test_file_name>, #", "{ 'test_class': self.test_class_name, 'test_method': None, 'test_module': self.test_module, 
'test_module_name': self.module_short_name, 'test_module_dir':", "tests ***\\n%s\" % ( len(test_plan.all_tests), self.test_class_name, len(test_plan.skipped_tests), len(test_plan.tests_to_execute), '\\n'.join([ test_plan.all_tests[test_name]['test_method_name']", "setup_method = getattr(self.test_class, setup_fixture) else: setup_method = setup_fixture test_params['setup_test_params'] =", "_check_test_for_skip(self): attribs = [] skip_test = False skip_msg = None", "0' skip_no_start = False if is_skipped: test_param.update({ 'skip_msg': skip_msg, 'skip_no_start':", "fail in Jenkins if not setup_passed: exit(1) def create_test_module_attr_yaml(self, test_method_names):", "option re_decorate = True repeat_count = int(repeated_test_option[self.current_test_method]) if re_decorate: from", "import_module from os import path, mkdir from time import time", "method name only current_test_method = None def __init__(self, config, **kwargs):", "hasattr(original_test, 'repeated_test_name'): # that test was previously decorated by @repeated_test,", "True if hasattr(test_function, \"__skip_conds__\") and \\ len(test_function.__skip_conds__) > 0: skip_conditions", "log_print(tb_msg, color='red') log_print(\"{} {} {}{}\".format(pad_string, test_status, exec_time(started), known_issue_str(known_issue)), color='red') self.result.stop_testcase(", "_run_test(self): setattr(self, '_secret_report_storage', InnerReportConfig()) test_exception = None tb_msg = None", "test module setup/teardown fixture. :param fixture_name: either 'setup' or 'teardown'", "configuration options from given configuration to Tiden config, # so", "self.result.update_test_name('{}_iteration_{}'.format(current_test, self.test_iteration + 1)) break finally: self.current_test_name = None self.current_test_method", "% (int(time() - started)), current_method_name=fixture_name) # except (AssertionError, TidenException) as", "Copyright 2017-2020 GridGain Systems. 
# # Licensed under the Apache", "decorated test function to method of a test class from", "%s tests ***\\n%s\" % ( len(test_plan.all_tests), self.test_class_name, len(test_plan.skipped_tests), len(test_plan.tests_to_execute), '\\n'.join([", "params test_params = { 'test_name': self.current_test_name, } test_function = getattr(self.test_class,", "{} self.total = TidenTestPlan() def __prepare_module_vars(self, module_name, fake_init=None): \"\"\" Prepare", "[] def update(self, other): self.all_tests.update(other.all_tests) self.skipped_tests.extend(other.skipped_tests) self.tests_to_execute.extend(other.tests_to_execute) class TidenRunner: #", "test module, a key to self.modules dictionary test_module = None", "or implied. # See the License for the specific language", "internal variables: Expects self.test_module be set to proper full name", "test_param.update(common_test_param) test_plan.tests_to_execute.append(self.current_test_name) test_plan.all_tests[self.current_test_name] = test_param.copy() finally: self.current_test_method = None self.current_test_name", "= TidenTestPlan() def __prepare_module_vars(self, module_name, fake_init=None): \"\"\" Prepare per-module initialization", "def __prepare_module_vars(self, module_name, fake_init=None): \"\"\" Prepare per-module initialization of internal", "test case class resets self.all_tests, self.tests_to_execute, self.skipped_tests config fills in", "resource directory :return: \"\"\" test_resource_dir = \"%s/res\" % self.config['rt']['test_module_dir'] if", "configuration[i]) for i, cfg_option_name in enumerate(cfg_options) ])), color='blue') else: cfg_options", "not path.exists(test_resource_dir): mkdir(test_resource_dir) self.config['rt']['resource_dir'] = \"%s/res/%s\" % (self.config['suite_dir'], self.module_short_name[5:]) for", "(AssertionError, TidenException) as e: test_status = 'fail' test_exception = e", "skip_message = skip_condition(self.test_class) if not conditions_met: skip_msg = 'skipped cause", 
"test_cnt, len(self.total.tests_to_execute)), color='yellow') def __print_with_format(self, msg='', current_method_name=''): if not current_method_name:", "= time() try: self.__print_with_format('started', current_method_name=fixture_name) self.__update_config_and_save(current_method_name=fixture_name) # Execute setup or", "'skipped due to repeated_test iterations <= 0' skip_no_start = False", "method_to_execute = None try: self._call_plugin_manager(f'before_test_method_{method_name}') all_tests = self.test_plan[self.test_module].all_tests if all_tests[self.current_test_name].get(f'{method_name}_test_method'):", "1)) break finally: self.current_test_name = None self.current_test_method = None def", "fixture_name): \"\"\" Execute test module setup/teardown fixture. :param fixture_name: either", "'any')): skip_msg = 'skipped cause of attrib mismatch' skip_test =", "\"\"\" try: test_plan = self.test_plan[self.test_module] for test_method_name in test_method_names: self.__prepare_test_vars(test_method_name,", "= test_param.copy() finally: self.current_test_method = None self.current_test_name = None def", "def __save_logs(self): test_dir = self.config.get('rt', {}).get('remote', {}).get('test_dir') if 'WardReport' in", "skip_test = True if hasattr(test_function, \"__skip_conds__\") and \\ len(test_function.__skip_conds__) >", "governing permissions and # limitations under the License. 
from .tidenpluginmanager", "= self.__get_pad_string(msg=self.current_test_method) self.result.skip_testcase_no_start(self.test_class, self.current_test_name, skip_message=test_param['skip_msg'], skip_no_start=test_param['skip_no_start']) self.result.update_xunit() log_print(\"%s %s\" %", "= test_function.repeated_test_name test_params['repeated_test_count'] = repeat_count test_params['repeated_test_name'] = repeated_test_name test_params['continue_on_fail'] =", "current module' test case class test_class = None # ==", "tests_to_execute): test_plan = self.test_plan[self.test_module] try: for test_cnt, current_test in enumerate(tests_to_execute,", "% self.test_module) # used for collect_only if fake_init: self.test_class =", "# first setup fixture if hasattr(test_function, \"__setup__\"): setup_fixture = getattr(test_function,", "color='blue') self.__prepare_session_vars() # Check requirements for applications for test_module in", "= True skip_no_start = True if hasattr(test_function, \"__skipped__\"): skip_msg =", "self.current_test_method) if hasattr(test_function, \"__attrib__\"): attribs = getattr(test_function, \"__attrib__\") attribs.append(str(self.current_test_method)) #", "= None self.current_test_method = None def _run_tests(self, tests_to_execute): test_plan =", "\"\"\" self._call_plugin_manager('before_test_class_%s' % fixture_name) fixture_passed = True try: if hasattr(self.test_class,", "test_plan.skipped_tests.append(self.current_test_name) else: if common_test_param: test_param.update(common_test_param) test_plan.tests_to_execute.append(self.current_test_name) test_plan.all_tests[self.current_test_name] = test_param.copy() finally:", "to method of a test class from types import MethodType", "start=1): test_param = test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) repeated_test_count = test_param.get('repeated_test_count', 1) repeated_test_continue_on_fail", "self.test_class.tiden.config = self.config self.test_class.tiden.ssh = 
self.ssh_pool self.test_class.config = self.config self.test_class.ssh", "long_path_len = 0 # instance of Result class result =", "\" % ( self.test_module, self.test_class_name, msg if msg else self.current_test_method))", "setup_passed: self._run_tests(tests_to_execute) # Execute module teardown self.__call_module_setup_teardown('teardown') # this is", "self.test_class_name self.module_short_name self.test_class - creates instance of test case class", "is passed to runner and test is not marked with", "in output_lines: file_name: str for file_name in line.split('\\n'): if file_name", "= False test_function = getattr(self.test_class, self.current_test_method) if hasattr(test_function, \"__attrib__\"): attribs", "self.ssh_pool.exec([f'mkdir -p {remote_test_module_dir}']) @step('{method_name}') def __call_test_setup_teardown(self, method_name): method_to_execute = None", "with all configuration options current_test_name = None # test method", "finally: if test_status != 'pass': log_print(tb_msg, color='red') log_print(\"{} {} {}{}\".format(pad_string,", "test_class_name = get_class_from_module(self.modules[test_module]['module_short_name']) test_class = getattr(module, test_class_name)(self.config, self.ssh_pool) if hasattr(test_class,", "current_method_name: if self.current_test_method: current_method_name = self.current_test_method else: current_method_name = ''", "skip it. if 'mute' in attribs: skip_msg = 'skipped cause", "short name of test module, e.g. 
test module file name", "started)), current_method_name=fixture_name) # except (AssertionError, TidenException) as e: except Exception", "without .py extension module_short_name = None # a name of", "if test_step['status'] is None: idx_to_add = idx break test_report.steps[idx_to_add]['children'] =", "# longest length of the test name long_path_len = 0", "from glob import glob import traceback class TidenTestPlan: all_tests =", "self.__prepare_test_vars(test_method_name, **common_test_param) test_param = { 'test_method_name': test_method_name, } is_skipped, skip_msg,", "fake_init=None): \"\"\" Prepare per-module initialization of internal variables: Expects self.test_module", "= None test_status = 'pass' pad_string = self.__get_pad_string() started =", "= sorted(test_plan.skipped_tests) try: for current_test in skipped_tests: test_param = test_plan.all_tests[current_test]", "check --to=repeated_test=N and --to=repeated_test.test_name=N options # and decorate test with", "test_function.repeated_test_name test_params['repeated_test_count'] = repeat_count test_params['repeated_test_name'] = repeated_test_name test_params['continue_on_fail'] = self.config.get('repeated_test_continue_on_fail',", "= None self.current_test_name = None def __print_found_test_method_to_execute(self, long_path_len, test_cnt, test_module):", "% self.config['rt']['resource_dir']): if path.isfile(file): copyfile(file, f\"{test_resource_dir}/{basename(file)}\") self.config['rt']['test_resource_dir'] = unix_path(test_resource_dir) def", "if file_name and file_name.endswith('.log'): send_file_name = f'{uuid4()}_{file_name}' add_attachment(self, file_name, send_file_name,", "{} # == for current test module: # a short", "hasattr(self.test_class, '__configurations__'): cfg_options = getattr(self.test_class, '__configuration_options__') configuration = get_actual_configuration(self.config, cfg_options)", "resets self.all_tests, self.tests_to_execute, self.skipped_tests config fills in config['rt'], 
config['rt']['remote'] Creates", "we should fail the test if method_name == 'setup': raise", "test_plan.all_tests[self.current_test_name] = test_param.copy() finally: self.current_test_method = None self.current_test_name = None", "by decorator or framework option repeat_count = 1 # here,", "import log_print, unix_path, call_method, create_case, kill_stalled_java, exec_time from .result import", "with Step(self, 'Execution'): try: call_method(self.test_class, self.current_test_method) finally: self.__set_child_steps_to_parent() self.__save_logs() log_print(f\"{pad_string}", "kwargs.get('xunit_path'): xunit_path_var = kwargs.get('xunit_path') elif config.get('var_dir') and config.get('xunit_file'): xunit_path_var =", "test_report.title = title test_report.suites = suites setattr(self, '_secret_report_storage', test_report) setattr(self.test_class,", "__call_test_setup_teardown(self, method_name): method_to_execute = None try: self._call_plugin_manager(f'before_test_method_{method_name}') all_tests = self.test_plan[self.test_module].all_tests", "== for current test module: # a short name of", ":return: \"\"\" for class_attr in dir(test_class): if class_attr.startswith('test_'): yield class_attr", "PluginManager instance pm = None # longest length of the", "self.test_class)) test_function = getattr(self.test_class, self.current_test_method) if hasattr(test_function, 'repeated_test_count'): repeat_count =", "TidenException) as e: except Exception as e: fixture_passed = False", "self.__print_with_format('failed in %s sec' % (int(time() - started)), current_method_name=fixture_name) log_print('Exception", "if current_method_name else self.current_test_method test_method_name = test_method.split('(')[0] if '(' in", "% (pad_string, test_cnt, len(self.total.tests_to_execute)), color='yellow') def __print_with_format(self, msg='', current_method_name=''): if", ":param test_method_names: :param common_test_param: :return: \"\"\" try: test_plan = 
self.test_plan[self.test_module]", "'=' + str( configuration[i]) for i, cfg_option_name in enumerate(cfg_options) ])),", "{ # 'path': <full-path-to-test-file>, # 'module_short_name': <test_file_name>, # } #", "initialization of internal variables: Expects self.test_module be set to proper", "'cfg_options': cfg_options, }) def collect_tests1(self, test_method_names, common_test_param={}): \"\"\" Collect given", "\"\"\" Collect tests from all modules. \"\"\" log_print(\"*** Collecting tests", "self.module_short_name) self.test_class_name = get_class_from_module(self.module_short_name) # Update Tiden config self.config['rt'] =", "configurations = getattr(self.test_class, '__configurations__').copy() for configuration in configurations: # set", "repeat_count > 0: if repeat_count == 1: # don't rename", "self.test_class.config = self.config self.test_class.ssh = self.ssh_pool self._save_config() def __prepare_test_vars(self, test_method_name=None,", "test_report) setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) def __call_module_setup_teardown(self, fixture_name): \"\"\" Execute test", "and skip itself set_configuration_options(cfg_options, self.config, configuration) self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration,", "__copy_resources_to_local_test_module_directory(self): \"\"\" Copy resources in test resource directory :return: \"\"\"", "len(test_function.__skip_conds__) > 0: skip_conditions = test_function.__skip_conds__ for skip_condition in skip_conditions:", "rename tests when only one iteration requested test_param['repeated_test_name'] = []", "# this is for correct fail in Jenkins if not", "get_actual_configuration(self.config, cfg_options) log_print(\"Configuration options for %s:\\n%s\" % (self.test_class.__class__.__name__, '\\n'.join([ '\\t'", "[cmd]) except: log_print(f'Failed to send report. 
\\n{format_exc()}', color='pink') def __copy_resources_to_local_test_module_directory(self):", "as e: fixture_passed = False self.__print_with_format('failed in %s sec' %", "configuration in configurations: # set configuration options from given configuration", "configuration) self.current_test_name = self.current_test_method + configuration_representation else: self.current_test_name = self.current_test_method", "fake_init: do not init module :return: \"\"\" self.test_module = module_name", "gen_tests(test_class): \"\"\" Generates all test method of given test class", "all modules: total = None # dictionary of TidenTestPlan indexed", "try: for current_test in skipped_tests: test_param = test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) pad_string", "skipped. Going to run %s tests ***\\n%s\" % ( len(test_plan.all_tests),", "if not test_method_name: return self.test_iteration = 1 self.current_test_method = test_method_name", "call_method(self.test_class, self.current_test_method) finally: self.__set_child_steps_to_parent() self.__save_logs() log_print(f\"{pad_string} passed {exec_time(started)}\", color='green') except", "Collect given set of tests from test module for all", "test name long_path_len = 0 # instance of Result class", "original test_names original_names = original_test.repeated_test_name decorated_test = repeated_test(repeat_count, test_names=original_names)(original_test.__func__) else:", "test_with_iterations = True if repeated_test_count > 1 else False pad_string", "os.path import join, basename from glob import glob import traceback", "# Tiden config dictionary config = None # Tiden SshPool", "if cfg_options is None: cfg_options = getattr(self.test_class, '__configuration_options__') if configuration", "test_method_names: self.__prepare_test_vars(test_method_name, **common_test_param) test_param = { 'test_method_name': test_method_name, } is_skipped,", "try: self.pm.do(\"before_test_method\", test_module=self.test_module, 
test_name=self.current_test_name, artifacts=self.config.get('artifacts', {})) self.result.start_testcase(self.test_class, self.current_test_name) self.__update_config_and_save(current_method_name=self.current_test_name) #", "# for process tests - prepare test directory and resources", "hasattr(test_function, \"__setup__\"): setup_fixture = getattr(test_function, \"__setup__\") if type(setup_fixture) == type(''):", "hasattr(self.test_class, fixture_name): started = time() try: self.__print_with_format('started', current_method_name=fixture_name) self.__update_config_and_save(current_method_name=fixture_name) #", "{ # '<suite_name>.<test_file_name>': { # 'path': <full-path-to-test-file>, # 'module_short_name': <test_file_name>,", "in writing, software # distributed under the License is distributed", "***\", color='blue') long_path_len = get_long_path_len(self.modules) from tiden.sshpool import AbstractSshPool self.ssh_pool", "'__report_title__', None) suites = getattr(getattr(self.test_class, self.current_test_method), '__report_suites__', None) if title:", "step, InnerReportConfig, Step, add_attachment, AttachmentType from .util import log_print, unix_path,", "key to self.modules dictionary test_module = None # == TidenTestPlan", "test_exception = None tb_msg = None test_status = 'pass' pad_string", "self.__call_module_setup_teardown('setup') if setup_passed: self._run_tests(tests_to_execute) # Execute module teardown self.__call_module_setup_teardown('teardown') #", "results for 'executed' tests for test_module in sorted(self.modules.keys()): self.__prepare_module_vars(test_module, fake_init=empty_init)", "self.config['rt']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['test_module_dir'], self.config['rt']['test_class'], test_dir_name) try: create_remote_dir = [", "try: call_method(self.test_class, self.current_test_method) finally: self.__set_child_steps_to_parent() self.__save_logs() log_print(f\"{pad_string} passed {exec_time(started)}\", color='green')", 
"***\", color='blue') self.__prepare_session_vars() # Check requirements for applications for test_module", "TidenTestPlan() self.__prepare_module_vars(test_module) # find test methods: if hasattr(self.test_class, '__configurations__'): cfg_options", "test_name in tests_to_execute ])), color='blue') # Execute module setup setup_passed", "and test_status != 'pass' and not repeated_test_continue_on_fail: self.result.update_test_name('{}_iteration_{}'.format(current_test, self.test_iteration +", "getattr(test_function, \"__known_issues__\") if known_issue: skip_msg = '{} cause of {}'.format(skip_msg,", "is MUTED' known_issue = None if hasattr(test_function, \"__known_issues__\"): known_issue =", "fixture. :param fixture_name: either 'setup' or 'teardown' :return: \"\"\" self._call_plugin_manager('before_test_class_%s'", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "test_status = self._run_test() if test_with_iterations and test_status != 'pass' and", "License, Version 2.0 (the \"License\"); # you may not use", "all configuration options current_test_name = None # test method name", "\"__setup__\"): setup_fixture = getattr(test_function, \"__setup__\") if type(setup_fixture) == type(''): setup_method", "{}'.format(skip_msg, known_issue) skip_test = True skip_no_start = True elif self.config.get('attrib')", "MethodType setattr(self.test_class, self.current_test_method, MethodType(decorated_test, self.test_class)) test_function = getattr(self.test_class, self.current_test_method) if", "f'cd {test_dir}; ' \\ f'curl -H \"filename: {send_file_name}\" ' \\", "inner_report_config=getattr(self, '_secret_report_storage')) # Kill java process if teardown function didn't", "mkdir from time import time from shutil import copyfile from", "attribs: skip_msg = 'skipped cause test is MUTED' known_issue =", "create_remote_dir = [ 'mkdir -p %s/%s/%s' % (self.config['rt']['remote']['test_module_dir'], self.test_class_name, str(test_dir_name)),", "getattr(test_function, 
\"__attrib__\") attribs.append(str(self.current_test_method)) # if attr is passed to runner", "True return skip_test, skip_msg, skip_no_start def get_tests_results(self): return self.result def", "hasattr(test_function, \"__known_issues__\"): known_issue = getattr(test_function, \"__known_issues__\") if known_issue: skip_msg =", "def get_tests_results(self): return self.result def _save_config(self): write_yaml_file(self.config['config_path'], self.config) @staticmethod def", "log_print(traceback.format_exc()) try: self.__save_logs() except: log_print(f'Failed to get logs\\n{traceback.format_exc()}', color='pink') #", "'skipped cause of %s' % skip_message skip_test = True if", "self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module) # find test methods: if hasattr(self.test_class,", "= repeated_test(repeat_count, test_names=original_names)(original_test.__func__) else: # that's a brand new decoration", "hasattr(self.test_class, '__configurations__'): if cfg_options is None: cfg_options = getattr(self.test_class, '__configuration_options__')", "the License for the specific language governing permissions and #", "in setup method then re-raise the exception as we should", "here, we check --to=repeated_test=N and --to=repeated_test.test_name=N options # and decorate", "%s. %s skipped. Going to run %s tests ***\\n%s\" %", "{method_name} code !!!', color='red') log_print(traceback.format_exc()) try: self.__save_logs() except: log_print(f'Failed to", "the License. 
from .tidenpluginmanager import PluginManager from .report.steps import step,", "True skip_no_start = True elif self.config.get('attrib') and should_be_skipped(self.config.get('attrib'), attribs, self.config.get('attr_match',", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "datetime.now().isoformat()[11:-7], self.test_class_name, current_method_name, msg)) def __print_current_module_name(self): log_print(\"[%s][%s]\" % ( datetime.now().isoformat()[11:-7],", "self.__prepare_module_vars(test_module, fake_init=empty_init) self.__print_current_module_name() test_method_names = sorted(list(self.gen_tests(self.test_class))) self.create_test_module_attr_yaml(test_method_names) self.collect_tests0(test_method_names) self.total.update(self.test_plan[test_module]) log_print(\"***", "vars self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module, fake_init=empty_init) self.__print_current_module_name() test_method_names = sorted(list(self.gen_tests(self.test_class)))", "self._skip_tests() if len(test_plan.tests_to_execute) > 0: tests_to_execute = sorted(test_plan.tests_to_execute, key=get_priority_key(self.test_class)) log_print(\"***", "in test_method_names: test_function = getattr(self.test_class, current_test_name) create_case(test_function) def __prepare_session_vars(self): self.test_plan", "# find test methods: if hasattr(self.test_class, '__configurations__'): cfg_options = getattr(self.test_class,", "skipped_tests = sorted(test_plan.skipped_tests) try: for current_test in skipped_tests: test_param =", "PluginManager from .report.steps import step, InnerReportConfig, Step, add_attachment, AttachmentType from", "config self.config['rt'] = { 'test_class': self.test_class_name, 'test_method': None, 'test_module': self.test_module,", "kwargs.get('ssh_pool') self.pm: PluginManager = kwargs.get('plugin_manager') def collect_tests(self): \"\"\" Collect tests", "self._save_config() def 
__prepare_test_vars(self, test_method_name=None, configuration=None, cfg_options=None, **kwargs): if not test_method_name:", "if re_decorate: from tiden.util import repeated_test original_test = test_function if", "self.modules = get_test_modules(config, collect_only=kwargs.get('collect_only')) self.config = config self.long_path_len = get_long_path_len(self.modules)", "\"{}/{}/{}\".format( self.config['rt']['remote']['test_module_dir'], self.config['rt']['test_class'], test_dir_name ) self.config['rt']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['test_module_dir'], self.config['rt']['test_class'],", "option repeat_count = 1 # here, we check --to=repeated_test=N and", "{}\".format( datetime.now().isoformat()[11:-7], self.test_class_name, current_method_name, msg)) def __print_current_module_name(self): log_print(\"[%s][%s]\" % (", "= method_long_name.ljust(long_path_len, '.') log_print(\"%s found (%s from %s)\" % (pad_string,", "self.current_test_name, skip_message=test_param['skip_msg'], skip_no_start=test_param['skip_no_start']) self.result.update_xunit() log_print(\"%s %s\" % (pad_string, test_param['skip_msg']), color='yellow')", "and should_be_skipped(self.config.get('attrib'), attribs, self.config.get('attr_match', 'any')): skip_msg = 'skipped cause of", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "long_path_len, test_cnt, test_module): method_long_name = \"%s.%s.%s \" % (test_module, self.test_class_name,", "1 else False pad_string = self.__get_pad_string() log_print(\"%s started (%s from", "test setup method self.__call_test_setup_teardown('setup') # self.__print_with_format() with Step(self, 'Execution'): try:", "test is MUTED' known_issue = None if hasattr(test_function, \"__known_issues__\"): known_issue", "msg)) def __print_current_module_name(self): log_print(\"[%s][%s]\" % ( datetime.now().isoformat()[11:-7], self.test_module)) def __get_pad_string(self,", "self.test_class_name = 
get_class_from_module(self.module_short_name) # Update Tiden config self.config['rt'] = {", "self.config.get('rt', {}).get('remote', {}).get('test_dir') if 'WardReport' in self.config.get('plugins', []): report_config =", "new module vars self.module_short_name = self.modules[self.test_module]['module_short_name'] test_module_dir = \"%s/%s\" %", "option was given as --to=repeated_test=N, re-decorate all tests re_decorate =", "try: for test_cnt, current_test in enumerate(tests_to_execute, start=1): test_param = test_plan.all_tests[current_test]", "def _save_config(self): write_yaml_file(self.config['config_path'], self.config) @staticmethod def gen_tests(test_class): \"\"\" Generates all", "log_print(\"%s found (%s from %s)\" % (pad_string, test_cnt, len(self.total.tests_to_execute)), color='yellow')", "skip_msg = 'skipped due to repeated_test iterations <= 0' skip_no_start", "brand new decoration decorated_test = repeated_test(repeat_count)(original_test.__func__) # this magic required", "self.skipped_tests config fills in config['rt'], config['rt']['remote'] Creates test module working", "config['rt'], config['rt']['remote'] Creates test module working local and remote directories.", "self.current_test_method + configuration_representation else: self.current_test_name = self.current_test_method def collect_test0(self): #", ".tidenpluginmanager import PluginManager from .report.steps import step, InnerReportConfig, Step, add_attachment,", "getattr(test_function, \"__teardown__\") teardown_method = getattr(self.test_class, teardown_fixture) test_params['teardown_test_method'] = teardown_method #", "log_print(\"[{}][.{}.{}] {}\".format( datetime.now().isoformat()[11:-7], self.test_class_name, current_method_name, msg)) def __print_current_module_name(self): log_print(\"[%s][%s]\" %", "= self.test_plan[self.test_module] for test_method_name in test_method_names: self.__prepare_test_vars(test_method_name, **common_test_param) test_param =", "[]) title = 
getattr(getattr(self.test_class, self.current_test_method), '__report_title__', None) suites = getattr(getattr(self.test_class,", "module under 'suites' directory sets up self.test_class_name self.module_short_name self.test_class -", "self.__prepare_session_vars() # Check requirements for applications for test_module in sorted(self.modules.keys()):", "name, with all configuration options current_test_name = None # test", "= get_long_path_len(self.modules) xunit_path_var = None if kwargs.get('xunit_path'): xunit_path_var = kwargs.get('xunit_path')", "for test_module in sorted(self.modules.keys()): # cleanup instance vars self.test_plan[test_module] =", "of %s' % skip_message skip_test = True return skip_test, skip_msg,", "= True elif self.config.get('attrib') and should_be_skipped(self.config.get('attrib'), attribs, self.config.get('attr_match', 'any')): skip_msg", "def collect_tests1(self, test_method_names, common_test_param={}): \"\"\" Collect given tests from current", "skip_no_start, }) test_plan.skipped_tests.append(self.current_test_name) else: if common_test_param: test_param.update(common_test_param) test_plan.tests_to_execute.append(self.current_test_name) test_plan.all_tests[self.current_test_name] =", "of repeated_tests decorator if all_tests.get(test_method) and all_tests[test_method].get('repeated_test_name'): test_dir_name = '{}_{}'.format(", "'test_module': self.test_module, 'test_module_name': self.module_short_name, 'test_module_dir': test_module_dir, 'remote': { 'test_module_dir': remote_test_module_dir,", "(%s from %s)\" % (pad_string, test_cnt, len(self.total.tests_to_execute)), color='yellow') def __print_with_format(self,", "'fail' test_exception = e tb_msg = traceback.format_exc() except Exception as", "class test_class = None # == for current test within", "instance vars self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module, fake_init=empty_init) self.__print_current_module_name() test_method_names =", 
"skip_test = True return skip_test, skip_msg, skip_no_start def get_tests_results(self): return", "# distributed under the License is distributed on an \"AS", "# Unless required by applicable law or agreed to in", "\"__skip_cond__\") conditions_met, skip_message = skip_condition(self.config) if not conditions_met: skip_msg =", "skip_no_start=test_param['skip_no_start']) self.result.update_xunit() log_print(\"%s %s\" % (pad_string, test_param['skip_msg']), color='yellow') finally: self.current_test_name", "set of tests from test module for all configurations :param", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "self.__set_child_steps_to_parent() self.__save_logs() log_print(f\"{pad_string} passed {exec_time(started)}\", color='green') except (AssertionError, TidenException) as", "all_tests[test_method].get('repeated_test_name'): test_dir_name = '{}_{}'.format( test_method_name, all_tests[test_method].get('repeated_test_name')[self.test_iteration]) self.config['rt']['test_method'] = test_method_name self.config['rt']['remote']['test_dir']", "@repeated_test automagically if that's required if self.config.get('repeated_test'): repeated_test_option = self.config['repeated_test']", "__print_with_format(self, msg='', current_method_name=''): if not current_method_name: if self.current_test_method: current_method_name =", "to send report. 
\\n{format_exc()}', color='pink') def __copy_resources_to_local_test_module_directory(self): \"\"\" Copy resources", ":return: \"\"\" self.test_module = module_name # fill new module vars", "collect_tests0(self, test_method_names): \"\"\" Collect given set of tests from test", "'skip_no_start': skip_no_start, }) test_plan.skipped_tests.append(self.current_test_name) else: if common_test_param: test_param.update(common_test_param) test_plan.tests_to_execute.append(self.current_test_name) test_plan.all_tests[self.current_test_name]", "= 'skipped cause of %s' % skip_message skip_test = True", "conditions_met: skip_msg = 'skipped cause of %s' % skip_message skip_test", "test_status, exec_time(started), known_issue_str(known_issue)), color='red') self.result.stop_testcase( test_status, e=test_exception, tb=tb_msg, known_issue=known_issue, run_info=self.test_class.get_run_info()", "self.test_plan[self.test_module] if len(test_plan.skipped_tests) > 0: self._skip_tests() if len(test_plan.tests_to_execute) > 0:", "collect_only if fake_init: self.test_class = getattr(module, self.test_class_name) self.test_class.__init__ = fake_init", "pad_string = self.__get_pad_string(msg=self.current_test_method) self.result.skip_testcase_no_start(self.test_class, self.current_test_name, skip_message=test_param['skip_msg'], skip_no_start=test_param['skip_no_start']) self.result.update_xunit() log_print(\"%s %s\"", "current_method_name=fixture_name) log_print('Exception in %s.%s.%s: %s\\n%s' % (self.test_module, self.test_class_name, fixture_name, str(e),", "( len(test_plan.all_tests), self.test_class_name, len(test_plan.skipped_tests), len(test_plan.tests_to_execute), '\\n'.join([ test_plan.all_tests[test_name]['test_method_name'] for test_name in", "True skip_no_start = True if hasattr(test_function, \"__skipped__\"): skip_msg = 'skipped", "the Apache License, Version 2.0 (the \"License\"); # you may", "# create attr.yaml for current_test_name in test_method_names: test_function = 
getattr(self.test_class,", "sec' % (int(time() - started)), current_method_name=fixture_name) log_print('Exception in %s.%s.%s: %s\\n%s'", "'run' %s tests ***\" % ( len(self.total.all_tests), len(self.total.skipped_tests), len(self.total.tests_to_execute) ),", "if len(test_plan.tests_to_execute) > 0: tests_to_execute = sorted(test_plan.tests_to_execute, key=get_priority_key(self.test_class)) log_print(\"*** Found", "= test_function.__skip_conds__ for skip_condition in skip_conditions: conditions_met, skip_message = skip_condition(self.test_class)", "None # == for current test within module: # test", "\"__skip_conds__\") and \\ len(test_function.__skip_conds__) > 0: skip_conditions = test_function.__skip_conds__ for", "color='blue') else: cfg_options = None configuration = None test_method_names =", "hasattr(self.test_class, '__configurations__'): self.collect_tests1(test_method_names) else: cfg_options = getattr(self.test_class, '__configuration_options__').copy() configurations =", "'skipped cause of %s' % skip_message skip_test = True return", "= [] def update(self, other): self.all_tests.update(other.all_tests) self.skipped_tests.extend(other.skipped_tests) self.tests_to_execute.extend(other.tests_to_execute) class TidenRunner:", "working directory. 
:param module_name: name of the module to prepare", "required to convert decorated test function to method of a", "= { 'test_name': self.current_test_name, } test_function = getattr(self.test_class, self.current_test_method) #", "__init__(self, config, **kwargs): if kwargs.get('modules', None) is not None: self.modules", "# test method name only current_test_method = None def __init__(self,", "self.test_class_name, len(test_plan.skipped_tests), len(test_plan.tests_to_execute), '\\n'.join([ test_plan.all_tests[test_name]['test_method_name'] for test_name in tests_to_execute ])),", "glob import glob import traceback class TidenTestPlan: all_tests = None", "test class :param test_class: :return: \"\"\" for class_attr in dir(test_class):", "getattr(module, self.test_class_name)(self.config, self.ssh_pool) if hasattr(self.test_class, 'tiden'): self.__copy_resources_to_local_test_module_directory() # Set ssh", "all_tests[test_method].get('repeated_test_name')[self.test_iteration]) self.config['rt']['test_method'] = test_method_name self.config['rt']['remote']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['remote']['test_module_dir'], self.config['rt']['test_class'], test_dir_name", "[] else: # rare case, skip by --to=repeated_test.test_name=0 is_skipped =", "in skipped_tests: test_param = test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) pad_string = self.__get_pad_string(msg=self.current_test_method) self.result.skip_testcase_no_start(self.test_class,", "None try: self._call_plugin_manager(f'before_test_method_{method_name}') all_tests = self.test_plan[self.test_module].all_tests if all_tests[self.current_test_name].get(f'{method_name}_test_method'): method_to_execute =", "type({}) != type(repeated_test_option): # if option was given as --to=repeated_test=N,", "= None # test method name only current_test_method = None", "current_method_name=''): if not current_method_name: if self.current_test_method: current_method_name = 
self.current_test_method else:", "SshPool instance ssh_pool = None # Tiden PluginManager instance pm", "get_class_from_module, known_issue_str from .priority_decorator import get_priority_key from .sshpool import SshPool", "test_plan = self.test_plan[self.test_module] skipped_tests = sorted(test_plan.skipped_tests) try: for current_test in", "longest length of the test name long_path_len = 0 #", "decorated_test = repeated_test(repeat_count)(original_test.__func__) # this magic required to convert decorated", ":param test_method_names: :return: \"\"\" if not hasattr(self.test_class, '__configurations__'): self.collect_tests1(test_method_names) else:", "}) def collect_tests1(self, test_method_names, common_test_param={}): \"\"\" Collect given tests from", "from os import path, mkdir from time import time from", "didn't kill nodes if not hasattr(self.test_class, 'keep_ignite_between_tests'): kill_stalled_java(self.ssh_pool) return test_status", "__set_child_steps_to_parent(self): exec_report: InnerReportConfig = getattr(self.test_class, '_secret_report_storage', None) test_report: InnerReportConfig =", "'__configurations__'): self.collect_tests1(test_method_names) else: cfg_options = getattr(self.test_class, '__configuration_options__').copy() configurations = getattr(self.test_class,", "= True if hasattr(test_function, \"__skip_cond__\"): skip_condition = getattr(test_function, \"__skip_cond__\") conditions_met,", "fixture_name) self.__print_with_format('finished in %s sec' % (int(time() - started)), current_method_name=fixture_name)", "= 'pass' pad_string = self.__get_pad_string() started = int(time()) known_issue =", "= test_param.get('repeated_test_count', 1) repeated_test_continue_on_fail = test_param.get('continue_on_fail') test_with_iterations = True if", "options from given configuration to Tiden config, # so that", "+ 1)) break finally: self.current_test_name = None self.current_test_method = None", "__prepare_session_vars(self): self.test_plan = {} 
self.total = TidenTestPlan() def __prepare_module_vars(self, module_name,", "Now generate results for 'executed' tests for test_module in sorted(self.modules.keys()):", "setup method self.__call_test_setup_teardown('setup') # self.__print_with_format() with Step(self, 'Execution'): try: call_method(self.test_class,", "under the License is distributed on an \"AS IS\" BASIS,", "cfg_options = getattr(self.test_class, '__configuration_options__') if configuration is None: configuration =", "decorator or framework option repeat_count = 1 # here, we", "file_name and file_name.endswith('.log'): send_file_name = f'{uuid4()}_{file_name}' add_attachment(self, file_name, send_file_name, AttachmentType.FILE)", "Tests ***\", color='blue') self.__prepare_session_vars() # Check requirements for applications for", "False if type({}) != type(repeated_test_option): # if option was given", "modules. \"\"\" log_print(\"*** Collecting tests ***\", color='blue') long_path_len = get_long_path_len(self.modules)", "length of the test name long_path_len = 0 # instance", "'skipped cause of %s' % test_function.__skipped_message__ skip_test = True if", "= getattr(self.test_class, current_test_name) create_case(test_function) def __prepare_session_vars(self): self.test_plan = {} self.total", "elif self.current_test_method in repeated_test_option.keys(): # otherwise re-decorate only if test", "test name matches given option re_decorate = True repeat_count =", "log_print(\"%s started (%s from %s)\" % (pad_string, test_cnt, len(tests_to_execute)), color='yellow')", "and all_tests[test_method].get('repeated_test_name'): test_dir_name = '{}_{}'.format( test_method_name, all_tests[test_method].get('repeated_test_name')[self.test_iteration]) self.config['rt']['test_method'] = test_method_name", "self.__update_config_and_save(current_method_name=self.current_test_name) # Execute test setup method self.__call_test_setup_teardown('setup') # self.__print_with_format() with", "Step, add_attachment, 
AttachmentType from .util import log_print, unix_path, call_method, create_case,", "# that test was previously decorated by @repeated_test, extract original", "configurations :param test_method_names: :return: \"\"\" if not hasattr(self.test_class, '__configurations__'): self.collect_tests1(test_method_names)", "= 'skipped due to repeated_test iterations <= 0' skip_no_start =", "given test class :param test_class: :return: \"\"\" for class_attr in", "Set ssh and config apps model classes self.test_class.tiden.config = self.config", "for 'executed' tests for test_module in sorted(self.modules.keys()): self.__prepare_module_vars(test_module, fake_init=empty_init) test_plan", "def __print_found_test_method_to_execute(self, long_path_len, test_cnt, test_module): method_long_name = \"%s.%s.%s \" %", "# } # } modules = None # Tiden config", "test_method_names = list(self.gen_tests(self.test_class)) self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration, 'cfg_options': cfg_options, })", "teardown_method = getattr(self.test_class, teardown_fixture) test_params['teardown_test_method'] = teardown_method # don't forget", "self.__print_with_format('finished in %s sec' % (int(time() - started)), current_method_name=fixture_name) #", "self.config) @staticmethod def gen_tests(test_class): \"\"\" Generates all test method of", "else: self.modules = get_test_modules(config, collect_only=kwargs.get('collect_only')) self.config = config self.long_path_len =", "self.current_test_name = None self.current_test_method = None def _run_tests(self, tests_to_execute): test_plan", "# if exception in setup method then re-raise the exception", "for current test module: # a short name of test", "if test_with_iterations and test_status != 'pass' and not repeated_test_continue_on_fail: self.result.update_test_name('{}_iteration_{}'.format(current_test,", "extract original test_names original_names = original_test.repeated_test_name decorated_test = 
repeated_test(repeat_count, test_names=original_names)(original_test.__func__)", "# == for current test within module: # test name,", "Execute test module setup/teardown fixture. :param fixture_name: either 'setup' or", "self.ssh_pool) else: # for process tests - prepare test directory", "indexed by test module name test_plan = {} # ==", "current_method_name=fixture_name) self.__update_config_and_save(current_method_name=fixture_name) # Execute setup or teardown method call_method(self.test_class, fixture_name)", "test_plan = {} # == for current test module: #", "msg='', current_method_name=''): if not current_method_name: if self.current_test_method: current_method_name = self.current_test_method", "False if is_skipped: test_param.update({ 'skip_msg': skip_msg, 'skip_no_start': skip_no_start, }) test_plan.skipped_tests.append(self.current_test_name)", "test_plan = self.test_plan[self.test_module] if len(test_plan.skipped_tests) > 0: self._skip_tests() if len(test_plan.tests_to_execute)", "0: self._skip_tests() if len(test_plan.tests_to_execute) > 0: tests_to_execute = sorted(test_plan.tests_to_execute, key=get_priority_key(self.test_class))", "importlib import import_module from os import path, mkdir from time", "= self.test_plan[self.test_module] try: for test_cnt, current_test in enumerate(tests_to_execute, start=1): test_param", "self._save_config() def _check_test_for_skip(self): attribs = [] skip_test = False skip_msg", "break test_report.steps[idx_to_add]['children'] = exec_report.steps + test_report.steps[idx_to_add].get('children', []) title = getattr(getattr(self.test_class,", "in %s sec' % (int(time() - started)), current_method_name=fixture_name) log_print('Exception in", "= setup_method # next, teardown fixture if hasattr(test_function, \"__teardown__\"): teardown_fixture", "method_long_name = \"%s.%s.%s \" % (test_module, self.test_class_name, self.current_test_name) pad_string =", "original_test = test_function if hasattr(original_test, 
'repeated_test_name'): # that test was", "= None # == TidenTestPlan for all modules: total =", "None # dictionary of TidenTestPlan indexed by test module name", "and remote directories. Copies resources from suite directory to local", "title: test_report.title = title test_report.suites = suites setattr(self, '_secret_report_storage', test_report)", "\"\"\" Generates all test method of given test class :param", "\"\"\" Prepare per-module initialization of internal variables: Expects self.test_module be", "'error' test_exception = e tb_msg = traceback.format_exc() finally: if test_status", "if repeat_count == 1: # don't rename tests when only", "\"__setup__\") if type(setup_fixture) == type(''): setup_method = getattr(self.test_class, setup_fixture) else:", "test can check options and skip itself set_configuration_options(cfg_options, self.config, configuration)", "= \"%s.%s.%s \" % (test_module, self.test_class_name, self.current_test_name) pad_string = method_long_name.ljust(long_path_len,", "self.ssh_pool self.test_class.config = self.config self.test_class.ssh = self.ssh_pool self._save_config() def __prepare_test_vars(self,", "= unix_path(test_resource_dir) def __create_test_module_directory(self, remote_test_module_dir, test_module_dir): mkdir(test_module_dir) self.ssh_pool.exec([f'mkdir -p {remote_test_module_dir}'])", "self._run_test() if test_with_iterations and test_status != 'pass' and not repeated_test_continue_on_fail:", "in attribs: skip_msg = 'skipped cause test is MUTED' known_issue", "# Check requirements for applications for test_module in sorted(self.modules.keys()): module", "ANY KIND, either express or implied. 
# See the License", "= getattr(self.test_class, teardown_fixture) test_params['teardown_test_method'] = teardown_method # don't forget known", "case class test_class = None # == for current test", "self.config['repeated_test'] re_decorate = False if type({}) != type(repeated_test_option): # if", "int(repeated_test_option[self.current_test_method]) if re_decorate: from tiden.util import repeated_test original_test = test_function", "the License. # You may obtain a copy of the", "in self.ssh_pool.exec([f\"ls {test_dir}\"]).items(): with Step(self, host_ip): for line in output_lines:", "self.config['rt']['test_module_dir'] if not path.exists(test_resource_dir): mkdir(test_resource_dir) self.config['rt']['resource_dir'] = \"%s/res/%s\" % (self.config['suite_dir'],", "# See the License for the specific language governing permissions", "test case class test_class = None # == for current", "self.test_module be set to proper full name of module under", "in sorted(self.modules.keys()): # cleanup instance vars self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module)", "module file name without .py extension module_short_name = None #", "range(repeated_test_count): if test_with_iterations: log_print(\"{} started (iteration {} from {})\".format(pad_string, self.test_iteration", "self.config = config self.ssh = ssh_pool self.__prepare_session_vars() for test_module in", "%s tests in %s. %s skipped. 
Going to run %s", "get_long_path_len, get_class_from_module, known_issue_str from .priority_decorator import get_priority_key from .sshpool import", "get_class_from_module(self.modules[test_module]['module_short_name']) test_class = getattr(module, test_class_name)(self.config, self.ssh_pool) if hasattr(test_class, 'check_requirements'): test_class.check_requirements()", ".priority_decorator import get_priority_key from .sshpool import SshPool from uuid import", "None # longest length of the test name long_path_len =", "test_params = { 'test_name': self.current_test_name, } test_function = getattr(self.test_class, self.current_test_method)", "(self.config['rt']['remote']['test_module_dir'], self.config['environment']['home']) ] self.ssh_pool.exec(create_remote_dir) except Exception: log_print(\"Can't create symlink to", "only once, # unless repeated_test_count set explicitly by decorator or", "setup_passed = self.__call_module_setup_teardown('setup') if setup_passed: self._run_tests(tests_to_execute) # Execute module teardown", "test_status = 'fail' test_exception = e tb_msg = traceback.format_exc() except", "--to=repeated_test.test_name=0 is_skipped = True skip_msg = 'skipped due to repeated_test", "tests :return: \"\"\" log_print(\"*** Tests ***\", color='blue') self.__prepare_session_vars() # Check", "matches given option re_decorate = True repeat_count = int(repeated_test_option[self.current_test_method]) if", "str(traceback.format_exc())), color='red') finally: self._call_plugin_manager('after_test_class_%s' % fixture_name) return fixture_passed def _call_plugin_manager(self,", ".runner import set_configuration_options, get_configuration_representation, get_actual_configuration from importlib import import_module from", "a test class from types import MethodType setattr(self.test_class, self.current_test_method, MethodType(decorated_test,", "'mkdir -p %s/%s/%s' % (self.config['rt']['remote']['test_module_dir'], self.test_class_name, str(test_dir_name)), 'ln 
-sfn %s", "getattr(getattr(self.test_class, self.current_test_method), '__report_title__', None) suites = getattr(getattr(self.test_class, self.current_test_method), '__report_suites__', None)", "skip_test = True if hasattr(test_function, \"__skip_cond__\"): skip_condition = getattr(test_function, \"__skip_cond__\")", "is None: cfg_options = getattr(self.test_class, '__configuration_options__') if configuration is None:", "self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration, 'cfg_options': cfg_options, }) test_plan = self.test_plan[self.test_module]", "all_tests = self.test_plan[self.test_module].all_tests # cause of repeated_tests decorator if all_tests.get(test_method)", "known_issue) skip_test = True skip_no_start = True elif self.config.get('attrib') and", "if hasattr(test_function, \"__teardown__\"): teardown_fixture = getattr(test_function, \"__teardown__\") teardown_method = getattr(self.test_class,", "collect_tests1(self, test_method_names, common_test_param={}): \"\"\" Collect given tests from current test", "self.__prepare_test_vars(**test_param) pad_string = self.__get_pad_string(msg=self.current_test_method) self.result.skip_testcase_no_start(self.test_class, self.current_test_name, skip_message=test_param['skip_msg'], skip_no_start=test_param['skip_no_start']) self.result.update_xunit() log_print(\"%s", "pm = None # longest length of the test name", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "{test_dir}\"]).items(): with Step(self, host_ip): for line in output_lines: file_name: str", "-p %s/%s/%s' % (self.config['rt']['remote']['test_module_dir'], self.test_class_name, str(test_dir_name)), 'ln -sfn %s %s/current_test_directory'", "all tests :return: \"\"\" log_print(\"*** Tests ***\", color='blue') self.__prepare_session_vars() #", "directories. 
Copies resources from suite directory to local test module", "in glob(\"%s/*\" % self.config['rt']['resource_dir']): if path.isfile(file): copyfile(file, f\"{test_resource_dir}/{basename(file)}\") self.config['rt']['test_resource_dir'] =", "<test_file_name>, # } # } modules = None # Tiden", "writing, software # distributed under the License is distributed on", "test_dir: try: for host_ip, output_lines in self.ssh_pool.exec([f\"ls {test_dir}\"]).items(): with Step(self,", "finally: self.__set_child_steps_to_parent() self._call_plugin_manager(f'after_test_method_{method_name}') def __set_child_steps_to_parent(self): exec_report: InnerReportConfig = getattr(self.test_class, '_secret_report_storage',", "% (test_module, self.test_class_name, self.current_test_name) pad_string = method_long_name.ljust(long_path_len, '.') log_print(\"%s found", "= self.ssh_pool self._save_config() def __prepare_test_vars(self, test_method_name=None, configuration=None, cfg_options=None, **kwargs): if", "test_module) test_class_name = get_class_from_module(self.modules[test_module]['module_short_name']) test_class = getattr(module, test_class_name)(self.config, self.ssh_pool) if", "str( configuration[i]) for i, cfg_option_name in enumerate(cfg_options) ])), color='blue') else:", "} test_function = getattr(self.test_class, self.current_test_method) # first setup fixture if", "for line in output_lines: file_name: str for file_name in line.split('\\n'):", "# current test module, a key to self.modules dictionary test_module", "repeated_test(repeat_count)(original_test.__func__) # this magic required to convert decorated test function", "e tb_msg = traceback.format_exc() except Exception as e: test_status =", "# limitations under the License. 
from .tidenpluginmanager import PluginManager from", "\"%s/%s\" % (self.config['suite_var_dir'], self.module_short_name) remote_test_module_dir = \"%s/%s\" % (self.config['remote']['suite_var_dir'], self.module_short_name)", "\\n{format_exc()}', color='pink') def __copy_resources_to_local_test_module_directory(self): \"\"\" Copy resources in test resource", "log_print(\"*** Collecting tests ***\", color='blue') long_path_len = get_long_path_len(self.modules) from tiden.sshpool", "self.config['environment']['home']) ] self.ssh_pool.exec(create_remote_dir) except Exception: log_print(\"Can't create symlink to current", "current test module, a key to self.modules dictionary test_module =", "Going to 'run' %s tests ***\" % ( len(self.total.all_tests), len(self.total.skipped_tests),", "self.ssh_pool.exec([f\"ls {test_dir}\"]).items(): with Step(self, host_ip): for line in output_lines: file_name:", "raise e finally: self.__set_child_steps_to_parent() self._call_plugin_manager(f'after_test_method_{method_name}') def __set_child_steps_to_parent(self): exec_report: InnerReportConfig =", "= kwargs.get('modules') else: self.modules = get_test_modules(config, collect_only=kwargs.get('collect_only')) self.config = config", "as --to=repeated_test=N, re-decorate all tests re_decorate = True repeat_count =", "test module working directory. 
:param module_name: name of the module", "to self.modules dictionary test_module = None # == TidenTestPlan for", "as e: test_status = 'fail' test_exception = e tb_msg =", "# 'path': <full-path-to-test-file>, # 'module_short_name': <test_file_name>, # } # }", "= None def __init__(self, config, **kwargs): if kwargs.get('modules', None) is", "default runs only once, # unless repeated_test_count set explicitly by", "= False if type({}) != type(repeated_test_option): # if option was", "self.config.get('attr_match', 'any')): skip_msg = 'skipped cause of attrib mismatch' skip_test", "Going to run %s tests ***\\n%s\" % ( len(test_plan.all_tests), self.test_class_name,", "and test is not marked with one of the attribute", "True repeat_count = int(repeated_test_option) elif self.current_test_method in repeated_test_option.keys(): # otherwise", "in test resource directory :return: \"\"\" test_resource_dir = \"%s/res\" %", "'_secret_report_storage') idx_to_add = None for idx, test_step in enumerate(test_report.steps): if", "= True return skip_test, skip_msg, skip_no_start def get_tests_results(self): return self.result", "finally: self.current_test_name = None self.current_test_method = None def _run_tests(self, tests_to_execute):", "new decoration decorated_test = repeated_test(repeat_count)(original_test.__func__) # this magic required to", "in repeated_test_option.keys(): # otherwise re-decorate only if test name matches", "None # Tiden config dictionary config = None # Tiden", "instance of current module' test case class test_class = None", ":return: \"\"\" if not hasattr(self.test_class, '__configurations__'): self.collect_tests1(test_method_names) else: cfg_options =", "= None # longest length of the test name long_path_len", "None: self.modules = kwargs.get('modules') else: self.modules = get_test_modules(config, collect_only=kwargs.get('collect_only')) self.config", "__prepare_module_vars(self, module_name, fake_init=None): \"\"\" Prepare per-module initialization of 
internal variables:", "test_dir = self.config.get('rt', {}).get('remote', {}).get('test_dir') if 'WardReport' in self.config.get('plugins', []):", "test_module): method_long_name = \"%s.%s.%s \" % (test_module, self.test_class_name, self.current_test_name) pad_string", "%s)\" % (pad_string, test_cnt, len(self.total.tests_to_execute)), color='yellow') def __print_with_format(self, msg='', current_method_name=''):", "'.') log_print(\"%s found (%s from %s)\" % (pad_string, test_cnt, len(self.total.tests_to_execute)),", "to get logs\\n{traceback.format_exc()}', color='pink') # if exception in setup method", "None def __print_found_test_method_to_execute(self, long_path_len, test_cnt, test_module): method_long_name = \"%s.%s.%s \"", "from all modules. \"\"\" log_print(\"*** Collecting tests ***\", color='blue') long_path_len", "cleanup instance vars self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module) # find test", "= [] self.tests_to_execute = [] def update(self, other): self.all_tests.update(other.all_tests) self.skipped_tests.extend(other.skipped_tests)", "Result(xunit_path=xunit_path_var) self.ssh_pool: SshPool = kwargs.get('ssh_pool') self.pm: PluginManager = kwargs.get('plugin_manager') def", "upload_logs = report_config['upload_logs'] else: return if test_dir: try: for host_ip,", "(self.config['suite_var_dir'], self.module_short_name) remote_test_module_dir = \"%s/%s\" % (self.config['remote']['suite_var_dir'], self.module_short_name) self.test_class_name =", "{test_dir}; ' \\ f'curl -H \"filename: {send_file_name}\" ' \\ f'-F", "self.current_test_name = None def __print_found_test_method_to_execute(self, long_path_len, test_cnt, test_module): method_long_name =", "except: log_print(f'Failed to get logs\\n{traceback.format_exc()}', color='pink') # if exception in", "from suite directory to local test module working directory. 
:param", "re_decorate = True repeat_count = int(repeated_test_option[self.current_test_method]) if re_decorate: from tiden.util", "that's a brand new decoration decorated_test = repeated_test(repeat_count)(original_test.__func__) # this", "@repeated_test, extract original test_names original_names = original_test.repeated_test_name decorated_test = repeated_test(repeat_count,", "skip_test = True skip_no_start = True if hasattr(test_function, \"__skipped__\"): skip_msg", "self.module_short_name[5:]) for file in glob(\"%s/*\" % self.config['rt']['resource_dir']): if path.isfile(file): copyfile(file,", "import write_yaml_file, should_be_skipped from .logger import * from .runner import", "'skipped cause test is MUTED' known_issue = None if hasattr(test_function,", "# { # '<suite_name>.<test_file_name>': { # 'path': <full-path-to-test-file>, # 'module_short_name':", "idx_to_add = idx break test_report.steps[idx_to_add]['children'] = exec_report.steps + test_report.steps[idx_to_add].get('children', [])", "}) test_plan.skipped_tests.append(self.current_test_name) else: if common_test_param: test_param.update(common_test_param) test_plan.tests_to_execute.append(self.current_test_name) test_plan.all_tests[self.current_test_name] = test_param.copy()", "self.modules[self.test_module]['module_short_name'] test_module_dir = \"%s/%s\" % (self.config['suite_var_dir'], self.module_short_name) remote_test_module_dir = \"%s/%s\"", "self.test_class_name, 'test_method': None, 'test_module': self.test_module, 'test_module_name': self.module_short_name, 'test_module_dir': test_module_dir, 'remote':", "str(e), str(traceback.format_exc())), color='red') finally: self._call_plugin_manager('after_test_class_%s' % fixture_name) return fixture_passed def", "by --to=repeated_test.test_name=0 is_skipped = True skip_msg = 'skipped due to", "e: fixture_passed = False self.__print_with_format('failed in %s sec' % (int(time()", "self.test_plan[self.test_module].all_tests if 
all_tests[self.current_test_name].get(f'{method_name}_test_method'): method_to_execute = all_tests[self.current_test_name].get(f'{method_name}_test_method') self.__print_with_format(msg=str(method_to_execute.__name__)) try: if all_tests[self.current_test_name].get(f'{method_name}_test_params'):", "to repeated_test iterations <= 0' skip_no_start = False if is_skipped:", "'cfg_options': cfg_options, }) test_plan = self.test_plan[self.test_module] if len(test_plan.skipped_tests) > 0:", "'pass': log_print(tb_msg, color='red') log_print(\"{} {} {}{}\".format(pad_string, test_status, exec_time(started), known_issue_str(known_issue)), color='red')", "% (self.config['suite_dir'], self.module_short_name[5:]) for file in glob(\"%s/*\" % self.config['rt']['resource_dir']): if", "test module working local and remote directories. Copies resources from", "test_module_dir) self.test_class = getattr(module, self.test_class_name)(self.config, self.ssh_pool) if hasattr(self.test_class, 'tiden'): self.__copy_resources_to_local_test_module_directory()", "__prepare_test_vars(self, test_method_name=None, configuration=None, cfg_options=None, **kwargs): if not test_method_name: return self.test_iteration", "elif config.get('var_dir') and config.get('xunit_file'): xunit_path_var = join(config.get('var_dir'), config.get('xunit_file')) self.result =", "xunit_path_var = None if kwargs.get('xunit_path'): xunit_path_var = kwargs.get('xunit_path') elif config.get('var_dir')", "\"__teardown__\") teardown_method = getattr(self.test_class, teardown_fixture) test_params['teardown_test_method'] = teardown_method # don't", "test_method_name = test_method.split('(')[0] if '(' in test_method else test_method test_dir_name", "fail the test if method_name == 'setup': raise e finally:", "import repeated_test original_test = test_function if hasattr(original_test, 'repeated_test_name'): # that", "__print_current_module_name(self): log_print(\"[%s][%s]\" % ( datetime.now().isoformat()[11:-7], 
self.test_module)) def __get_pad_string(self, msg=None): return", "Found %s tests. %s skipped. Going to 'run' %s tests", "import traceback class TidenTestPlan: all_tests = None skipped_tests = None", "\"filename: {send_file_name}\" ' \\ f'-F \"file=@{file_name};filename={file_name}\" ' \\ f'{files_receiver_url}/files/add' self.ssh_pool.exec_on_host(host_ip,", "from .runner import get_test_modules, get_long_path_len, get_class_from_module, known_issue_str from .priority_decorator import", "+ 1, repeated_test_count), color='yellow') test_status = self._run_test() if test_with_iterations and", "in %s sec' % (int(time() - started)), current_method_name=fixture_name) # except", "repeat_count = test_param.get('repeated_test_count', 1) if repeat_count > 0: if repeat_count", ":param fake_init: do not init module :return: \"\"\" self.test_module =", "remote_test_module_dir, } } module = import_module(\"suites.%s\" % self.test_module) # used", "= getattr(self, '_secret_report_storage') idx_to_add = None for idx, test_step in", "module_name, fake_init=None): \"\"\" Prepare per-module initialization of internal variables: Expects", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "for file_name in line.split('\\n'): if file_name and file_name.endswith('.log'): send_file_name =", "def collect_tests(self): \"\"\" Collect tests from all modules. 
\"\"\" log_print(\"***", "and config.get('xunit_file'): xunit_path_var = join(config.get('var_dir'), config.get('xunit_file')) self.result = Result(xunit_path=xunit_path_var) self.ssh_pool:", "current_test_name in test_method_names: test_function = getattr(self.test_class, current_test_name) create_case(test_function) def __prepare_session_vars(self):", "# self.__print_with_format() with Step(self, 'Execution'): try: call_method(self.test_class, self.current_test_method) finally: self.__set_child_steps_to_parent()", "self.config['rt']['remote']['test_module_dir'], self.config['rt']['test_class'], test_dir_name ) self.config['rt']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['test_module_dir'], self.config['rt']['test_class'], test_dir_name)", "module :return: \"\"\" self.test_module = module_name # fill new module", "= None # Tiden PluginManager instance pm = None #", "setup_fixture = getattr(test_function, \"__setup__\") if type(setup_fixture) == type(''): setup_method =", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "'setup' or 'teardown' :return: \"\"\" self._call_plugin_manager('before_test_class_%s' % fixture_name) fixture_passed =", "'test_module_dir': test_module_dir, 'remote': { 'test_module_dir': remote_test_module_dir, } } module =", "self.skipped_tests = [] self.tests_to_execute = [] def update(self, other): self.all_tests.update(other.all_tests)", "\"%s/res/%s\" % (self.config['suite_dir'], self.module_short_name[5:]) for file in glob(\"%s/*\" % self.config['rt']['resource_dir']):", "skip_conditions = test_function.__skip_conds__ for skip_condition in skip_conditions: conditions_met, skip_message =", "Execute test setup method self.__call_test_setup_teardown('setup') # self.__print_with_format() with Step(self, 'Execution'):", "= None def _run_tests(self, tests_to_execute): test_plan = self.test_plan[self.test_module] try: for", "mismatch' skip_test = True skip_no_start = True if hasattr(test_function, 
\"__skipped__\"):", "(%s from %s)\" % (pad_string, test_cnt, len(tests_to_execute)), color='yellow') for self.test_iteration", "> 0: self._skip_tests() if len(test_plan.tests_to_execute) > 0: tests_to_execute = sorted(test_plan.tests_to_execute,", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "java process if teardown function didn't kill nodes if not", "hasattr(test_function, \"__attrib__\"): attribs = getattr(test_function, \"__attrib__\") attribs.append(str(self.current_test_method)) # if attr", "self.test_plan[self.test_module].all_tests # cause of repeated_tests decorator if all_tests.get(test_method) and all_tests[test_method].get('repeated_test_name'):", "def __get_pad_string(self, msg=None): return (\"%s.%s.%s \" % ( self.test_module, self.test_class_name,", "of Result class result = None # current test module,", "from tiden.util import repeated_test original_test = test_function if hasattr(original_test, 'repeated_test_name'):", "+ '=' + str( configuration[i]) for i, cfg_option_name in enumerate(cfg_options)", "= f'cd {test_dir}; ' \\ f'curl -H \"filename: {send_file_name}\" '", "# cause of repeated_tests decorator if all_tests.get(test_method) and all_tests[test_method].get('repeated_test_name'): test_dir_name", "that test was previously decorated by @repeated_test, extract original test_names", "repeated_test_name test_params['continue_on_fail'] = self.config.get('repeated_test_continue_on_fail', False) return test_params def _skip_tests(self): test_plan", "% (self.test_class.__class__.__name__, '\\n'.join([ '\\t' + cfg_option_name + '=' + str(", "'__configurations__'): if cfg_options is None: cfg_options = getattr(self.test_class, '__configuration_options__') if", "should_be_skipped(self.config.get('attrib'), attribs, self.config.get('attr_match', 'any')): skip_msg = 'skipped cause of attrib", "for %s:\\n%s\" % (self.test_class.__class__.__name__, '\\n'.join([ '\\t' + cfg_option_name + '='", "-p {remote_test_module_dir}']) 
@step('{method_name}') def __call_test_setup_teardown(self, method_name): method_to_execute = None try:", "self.__print_current_module_name() test_method_names = sorted(list(self.gen_tests(self.test_class))) self.create_test_module_attr_yaml(test_method_names) self.collect_tests0(test_method_names) self.total.update(self.test_plan[test_module]) log_print(\"*** Found %s", "if 'mute' in attribs: skip_msg = 'skipped cause test is", "self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration, 'cfg_options': cfg_options, }) def collect_tests1(self, test_method_names,", "type(setup_fixture) == type(''): setup_method = getattr(self.test_class, setup_fixture) else: setup_method =", "{} from {})\".format(pad_string, self.test_iteration + 1, repeated_test_count), color='yellow') test_status =", "= sorted(test_plan.tests_to_execute, key=get_priority_key(self.test_class)) log_print(\"*** Found %s tests in %s. %s", "def __prepare_test_vars(self, test_method_name=None, configuration=None, cfg_options=None, **kwargs): if not test_method_name: return", "re_decorate = False if type({}) != type(repeated_test_option): # if option", "send_file_name, AttachmentType.FILE) if upload_logs: cmd = f'cd {test_dir}; ' \\", "= teardown_method # don't forget known issues if hasattr(test_function, \"__known_issues__\"):", "0: skip_conditions = test_function.__skip_conds__ for skip_condition in skip_conditions: conditions_met, skip_message", "in sorted(self.modules.keys()): self.__prepare_module_vars(test_module, fake_init=empty_init) test_plan = self.test_plan[self.test_module] for test_name in", "directory. 
:param module_name: name of the module to prepare :param", "import join, basename from glob import glob import traceback class", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "of TidenTestPlan indexed by test module name test_plan = {}", "import PluginManager from .report.steps import step, InnerReportConfig, Step, add_attachment, AttachmentType", "getattr(getattr(self.test_class, self.current_test_method), '__report_suites__', None) if title: test_report.title = title test_report.suites", "test_status, e=test_exception, tb=tb_msg, known_issue=known_issue, run_info=self.test_class.get_run_info() if hasattr(self.test_class, 'get_run_info') else None", "--to=repeated_test=N and --to=repeated_test.test_name=N options # and decorate test with @repeated_test", "self.__get_pad_string() log_print(\"%s started (%s from %s)\" % (pad_string, test_cnt, len(tests_to_execute)),", "specific language governing permissions and # limitations under the License.", "def _call_plugin_manager(self, execution_point): args = [self.test_module, self.test_class] if self.current_test_method: args.append(self.current_test_method)", "test by default runs only once, # unless repeated_test_count set", "if hasattr(self.test_class, fixture_name): started = time() try: self.__print_with_format('started', current_method_name=fixture_name) self.__update_config_and_save(current_method_name=fixture_name)", "process tests - prepare test directory and resources self.__create_test_module_directory(remote_test_module_dir, test_module_dir)", ":return: \"\"\" self._call_plugin_manager('before_test_class_%s' % fixture_name) fixture_passed = True try: if", "current_test in skipped_tests: test_param = test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) pad_string = self.__get_pad_string(msg=self.current_test_method)", "traceback import format_exc from .runner import set_configuration_options, get_configuration_representation, 
get_actual_configuration from", "self.config self.test_class.tiden.ssh = self.ssh_pool self.test_class.config = self.config self.test_class.ssh = self.ssh_pool", "= { 'test_class': self.test_class_name, 'test_method': None, 'test_module': self.test_module, 'test_module_name': self.module_short_name,", "= repeated_test(repeat_count)(original_test.__func__) # this magic required to convert decorated test", "'' log_print(\"[{}][.{}.{}] {}\".format( datetime.now().isoformat()[11:-7], self.test_class_name, current_method_name, msg)) def __print_current_module_name(self): log_print(\"[%s][%s]\"", ") # Execute test teardown method self.__call_test_setup_teardown('teardown') self.pm.do('after_test_method', test_status=test_status, exception=test_exception,", "= None def __init__(self): self.all_tests = {} self.skipped_tests = []", "kill_stalled_java, exec_time from .result import Result from .util import write_yaml_file,", "TidenRunner: # { # '<suite_name>.<test_file_name>': { # 'path': <full-path-to-test-file>, #", "report_config = self.config['plugins']['WardReport'] files_receiver_url = report_config['files_url'] upload_logs = report_config['upload_logs'] else:", "test_method_names, common_test_param={}): \"\"\" Collect given tests from current test module", "for test_module in sorted(self.modules.keys()): self.__prepare_module_vars(test_module, fake_init=empty_init) test_plan = self.test_plan[self.test_module] for", "if hasattr(test_function, 'repeated_test_count'): repeat_count = test_function.repeated_test_count repeated_test_name = test_function.repeated_test_name test_params['repeated_test_count']", "setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) try: self.pm.do(\"before_test_method\", test_module=self.test_module, test_name=self.current_test_name, artifacts=self.config.get('artifacts', {})) self.result.start_testcase(self.test_class,", "config, **kwargs): if kwargs.get('modules', None) is not None: self.modules =", "% 
(self.config['suite_var_dir'], self.module_short_name) remote_test_module_dir = \"%s/%s\" % (self.config['remote']['suite_var_dir'], self.module_short_name) self.test_class_name", "e: except Exception as e: fixture_passed = False self.__print_with_format('failed in", "self.long_path_len = get_long_path_len(self.modules) xunit_path_var = None if kwargs.get('xunit_path'): xunit_path_var =", "# instance of current module' test case class test_class =", "applications for test_module in sorted(self.modules.keys()): module = import_module(\"suites.%s\" % test_module)", "# you may not use this file except in compliance", "getattr(self.test_class, '_secret_report_storage', None) test_report: InnerReportConfig = getattr(self, '_secret_report_storage') idx_to_add =", "module setup/teardown fixture. :param fixture_name: either 'setup' or 'teardown' :return:", "known issues if hasattr(test_function, \"__known_issues__\"): known_issue = getattr(test_function, \"__known_issues__\") test_params['known_issue']", "True skip_msg = 'skipped due to repeated_test iterations <= 0'", "fills in config['rt'], config['rt']['remote'] Creates test module working local and", "# a short name of test module, e.g. 
test module", "is None: idx_to_add = idx break test_report.steps[idx_to_add]['children'] = exec_report.steps +", "not hasattr(self.test_class, '__configurations__'): self.collect_tests1(test_method_names) else: cfg_options = getattr(self.test_class, '__configuration_options__').copy() configurations", "repeated_test_continue_on_fail = test_param.get('continue_on_fail') test_with_iterations = True if repeated_test_count > 1", "current test module :param test_method_names: :param common_test_param: :return: \"\"\" try:", "all configurations :param test_method_names: :return: \"\"\" if not hasattr(self.test_class, '__configurations__'):", "original_test.repeated_test_name decorated_test = repeated_test(repeat_count, test_names=original_names)(original_test.__func__) else: # that's a brand", "Prepare per-module initialization of internal variables: Expects self.test_module be set", "stacktrace=tb_msg, known_issue=known_issue, description=getattr(self.test_class, self.current_test_method, lambda: None).__doc__, inner_report_config=getattr(self, '_secret_report_storage')) # Kill", "= self.test_plan[self.test_module].all_tests if all_tests[self.current_test_name].get(f'{method_name}_test_method'): method_to_execute = all_tests[self.current_test_name].get(f'{method_name}_test_method') self.__print_with_format(msg=str(method_to_execute.__name__)) try: if", "if title: test_report.title = title test_report.suites = suites setattr(self, '_secret_report_storage',", "creates instance of test case class resets self.all_tests, self.tests_to_execute, self.skipped_tests", "self.config.get('repeated_test'): repeated_test_option = self.config['repeated_test'] re_decorate = False if type({}) !=", "module :param test_method_names: :param common_test_param: :return: \"\"\" try: test_plan =", ":param test_class: :return: \"\"\" for class_attr in dir(test_class): if class_attr.startswith('test_'):", "a brand new decoration decorated_test = 
repeated_test(repeat_count)(original_test.__func__) # this magic", "current_test in enumerate(tests_to_execute, start=1): test_param = test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) repeated_test_count =", "if hasattr(test_function, \"__attrib__\"): attribs = getattr(test_function, \"__attrib__\") attribs.append(str(self.current_test_method)) # if", "= get_long_path_len(self.modules) from tiden.sshpool import AbstractSshPool self.ssh_pool = AbstractSshPool({'hosts': []})", "Tiden config, # so that test can check options and", "except: log_print(f'Failed to send report. \\n{format_exc()}', color='pink') def __copy_resources_to_local_test_module_directory(self): \"\"\"", "module' test class test_class_name = None # instance of current", "'_secret_report_storage', InnerReportConfig()) def __call_module_setup_teardown(self, fixture_name): \"\"\" Execute test module setup/teardown", "@staticmethod def gen_tests(test_class): \"\"\" Generates all test method of given", "self.result.stop_testcase( test_status, e=test_exception, tb=tb_msg, known_issue=known_issue, run_info=self.test_class.get_run_info() if hasattr(self.test_class, 'get_run_info') else", "!!!', color='red') log_print(traceback.format_exc()) try: self.__save_logs() except: log_print(f'Failed to get logs\\n{traceback.format_exc()}',", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "output_lines in self.ssh_pool.exec([f\"ls {test_dir}\"]).items(): with Step(self, host_ip): for line in", "decorated by @repeated_test, extract original test_names original_names = original_test.repeated_test_name decorated_test", "tb_msg = traceback.format_exc() finally: if test_status != 'pass': log_print(tb_msg, color='red')", "setup method then re-raise the exception as we should fail", "= getattr(test_function, \"__known_issues__\") test_params['known_issue'] = known_issue # test by default", "= 1 # here, we check --to=repeated_test=N and 
--to=repeated_test.test_name=N options", "self.tests_to_execute.extend(other.tests_to_execute) class TidenRunner: # { # '<suite_name>.<test_file_name>': { # 'path':", "InnerReportConfig, Step, add_attachment, AttachmentType from .util import log_print, unix_path, call_method,", "TidenTestPlan indexed by test module name test_plan = {} #", "'\\n'.join([ '\\t' + cfg_option_name + '=' + str( configuration[i]) for", "\"__skipped__\"): skip_msg = 'skipped cause of %s' % test_function.__skipped_message__ skip_test", "under the Apache License, Version 2.0 (the \"License\"); # you", "from os.path import join, basename from glob import glob import", "class_attr def collect_tests0(self, test_method_names): \"\"\" Collect given set of tests", "= {} self.total = TidenTestPlan() def __prepare_module_vars(self, module_name, fake_init=None): \"\"\"", "= suites setattr(self, '_secret_report_storage', test_report) setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) def __call_module_setup_teardown(self,", "%s/%s/%s' % (self.config['rt']['remote']['test_module_dir'], self.test_class_name, str(test_dir_name)), 'ln -sfn %s %s/current_test_directory' %", "test_class_name = None # instance of current module' test case", "file_name: str for file_name in line.split('\\n'): if file_name and file_name.endswith('.log'):", "cfg_option_name + '=' + str( configuration[i]) for i, cfg_option_name in", ") self.config['rt']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['test_module_dir'], self.config['rt']['test_class'], test_dir_name) try: create_remote_dir =", "= TidenTestPlan() self.__prepare_module_vars(test_module, fake_init=empty_init) self.__print_current_module_name() test_method_names = sorted(list(self.gen_tests(self.test_class))) self.create_test_module_attr_yaml(test_method_names) self.collect_tests0(test_method_names)", "if path.isfile(file): copyfile(file, f\"{test_resource_dir}/{basename(file)}\") self.config['rt']['test_resource_dir'] = 
unix_path(test_resource_dir) def __create_test_module_directory(self, remote_test_module_dir,", "config['rt']['remote'] Creates test module working local and remote directories. Copies", "(int(time() - started)), current_method_name=fixture_name) # except (AssertionError, TidenException) as e:", "teardown method self.__call_test_setup_teardown('teardown') self.pm.do('after_test_method', test_status=test_status, exception=test_exception, stacktrace=tb_msg, known_issue=known_issue, description=getattr(self.test_class, self.current_test_method,", "from {})\".format(pad_string, self.test_iteration + 1, repeated_test_count), color='yellow') test_status = self._run_test()", "configuration) self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration, 'cfg_options': cfg_options, }) def collect_tests1(self,", "hasattr(test_function, \"__known_issues__\"): known_issue = getattr(test_function, \"__known_issues__\") test_params['known_issue'] = known_issue #", "{} {}{}\".format(pad_string, test_status, exec_time(started), known_issue_str(known_issue)), color='red') self.result.stop_testcase( test_status, e=test_exception, tb=tb_msg,", "self.skipped_tests.extend(other.skipped_tests) self.tests_to_execute.extend(other.tests_to_execute) class TidenRunner: # { # '<suite_name>.<test_file_name>': { #", "test_step['status'] is None: idx_to_add = idx break test_report.steps[idx_to_add]['children'] = exec_report.steps", "def __copy_resources_to_local_test_module_directory(self): \"\"\" Copy resources in test resource directory :return:", "not setup_passed: exit(1) def create_test_module_attr_yaml(self, test_method_names): # create attr.yaml for", "config self.long_path_len = get_long_path_len(self.modules) xunit_path_var = None if kwargs.get('xunit_path'): xunit_path_var", "then skip it. if 'mute' in attribs: skip_msg = 'skipped", "instance pm = None # longest length of the test", "log_print(\"*** Found %s tests in %s. %s skipped. 
Going to", "***\" % ( len(self.total.all_tests), len(self.total.skipped_tests), len(self.total.tests_to_execute) ), color='blue') test_cnt =", "\\ f'curl -H \"filename: {send_file_name}\" ' \\ f'-F \"file=@{file_name};filename={file_name}\" '", "= getattr(self.test_class, '_secret_report_storage', None) test_report: InnerReportConfig = getattr(self, '_secret_report_storage') idx_to_add", "% (self.config['rt']['remote']['test_module_dir'], self.test_class_name, str(test_dir_name)), 'ln -sfn %s %s/current_test_directory' % (self.config['rt']['remote']['test_module_dir'],", "\"\"\" Collect given tests from current test module :param test_method_names:", "given as --to=repeated_test=N, re-decorate all tests re_decorate = True repeat_count", "self.config['rt']['resource_dir'] = \"%s/res/%s\" % (self.config['suite_dir'], self.module_short_name[5:]) for file in glob(\"%s/*\"", "} # } modules = None # Tiden config dictionary", "'__configuration_options__') if configuration is None: configuration = get_actual_configuration(self.config, cfg_options) configuration_representation", "= None # current test module, a key to self.modules", "\"\"\" log_print(\"*** Tests ***\", color='blue') self.__prepare_session_vars() # Check requirements for", "self.config['rt'] = { 'test_class': self.test_class_name, 'test_method': None, 'test_module': self.test_module, 'test_module_name':", "self.test_class] if self.current_test_method: args.append(self.current_test_method) self.pm.do(execution_point, *args) def __update_config_and_save(self, current_method_name=None): test_method", "cfg_options=None, **kwargs): if not test_method_name: return self.test_iteration = 1 self.current_test_method", "attribs = [] skip_test = False skip_msg = None skip_no_start", "= 'skipped cause of %s' % test_function.__skipped_message__ skip_test = True", "conditions_met, skip_message = skip_condition(self.config) if not conditions_met: skip_msg = 'skipped", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS 
OF", "None, 'test_module': self.test_module, 'test_module_name': self.module_short_name, 'test_module_dir': test_module_dir, 'remote': { 'test_module_dir':", "= getattr(getattr(self.test_class, self.current_test_method), '__report_suites__', None) if title: test_report.title = title", "from shutil import copyfile from os.path import join, basename from", "len(self.total.tests_to_execute) ), color='blue') test_cnt = 0 # Skipped tests do", "to proper full name of module under 'suites' directory sets", "AttachmentType.FILE) if upload_logs: cmd = f'cd {test_dir}; ' \\ f'curl", "fixture_passed = True try: if hasattr(self.test_class, fixture_name): started = time()", "# test by default runs only once, # unless repeated_test_count", "% (self.config['rt']['remote']['test_module_dir'], self.config['environment']['home']) ] self.ssh_pool.exec(create_remote_dir) except Exception: log_print(\"Can't create symlink", "file name without .py extension module_short_name = None # a", "skipped_tests: test_param = test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) pad_string = self.__get_pad_string(msg=self.current_test_method) self.result.skip_testcase_no_start(self.test_class, self.current_test_name,", "= self.test_plan[self.test_module] skipped_tests = sorted(test_plan.skipped_tests) try: for current_test in skipped_tests:", "glob(\"%s/*\" % self.config['rt']['resource_dir']): if path.isfile(file): copyfile(file, f\"{test_resource_dir}/{basename(file)}\") self.config['rt']['test_resource_dir'] = unix_path(test_resource_dir)", "modules: total = None # dictionary of TidenTestPlan indexed by", "if all_tests[self.current_test_name].get(f'{method_name}_test_params'): method_to_execute(self.test_class) else: method_to_execute() except Exception as e: log_print(f'!!!", "self.module_short_name, 'test_module_dir': test_module_dir, 'remote': { 'test_module_dir': remote_test_module_dir, } } module", "if test name matches given option re_decorate = True repeat_count", 
"test_step in enumerate(test_report.steps): if test_step['status'] is None: idx_to_add = idx", "test_report.steps[idx_to_add].get('children', []) title = getattr(getattr(self.test_class, self.current_test_method), '__report_title__', None) suites =", "test_module in sorted(self.modules.keys()): # cleanup instance vars self.test_plan[test_module] = TidenTestPlan()", "test_status = 'pass' pad_string = self.__get_pad_string() started = int(time()) known_issue", "do not hit collect report # Now generate results for", "% test_module) test_class_name = get_class_from_module(self.modules[test_module]['module_short_name']) test_class = getattr(module, test_class_name)(self.config, self.ssh_pool)", "== 'setup': raise e finally: self.__set_child_steps_to_parent() self._call_plugin_manager(f'after_test_method_{method_name}') def __set_child_steps_to_parent(self): exec_report:", "suites setattr(self, '_secret_report_storage', test_report) setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) def __call_module_setup_teardown(self, fixture_name):", "enumerate(test_report.steps): if test_step['status'] is None: idx_to_add = idx break test_report.steps[idx_to_add]['children']", "Tiden PluginManager instance pm = None # longest length of", "get_long_path_len(self.modules) xunit_path_var = None if kwargs.get('xunit_path'): xunit_path_var = kwargs.get('xunit_path') elif", "= \"%s/%s\" % (self.config['suite_var_dir'], self.module_short_name) remote_test_module_dir = \"%s/%s\" % (self.config['remote']['suite_var_dir'],", "(\"%s.%s.%s \" % ( self.test_module, self.test_class_name, msg if msg else", "color='red') self._save_config() def _check_test_for_skip(self): attribs = [] skip_test = False", "= title test_report.suites = suites setattr(self, '_secret_report_storage', test_report) setattr(self.test_class, '_secret_report_storage',", "except Exception as e: log_print(f'!!! 
Exception in {method_name} code !!!',", "Step(self, 'Execution'): try: call_method(self.test_class, self.current_test_method) finally: self.__set_child_steps_to_parent() self.__save_logs() log_print(f\"{pad_string} passed", "= True if repeated_test_count > 1 else False pad_string =", "len(self.total.skipped_tests), len(self.total.tests_to_execute) ), color='blue') test_cnt = 0 # Skipped tests", "Collect given tests from current test module :param test_method_names: :param", "# Set ssh and config apps model classes self.test_class.tiden.config =", "None # == TidenTestPlan for all modules: total = None", "tests_to_execute = sorted(test_plan.tests_to_execute, key=get_priority_key(self.test_class)) log_print(\"*** Found %s tests in %s.", "if hasattr(self.test_class, '__configurations__'): cfg_options = getattr(self.test_class, '__configuration_options__') configuration = get_actual_configuration(self.config,", "self.current_test_name = None self.current_test_method = None def _run_test(self): setattr(self, '_secret_report_storage',", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "test_param.get('repeated_test_count', 1) if repeat_count > 0: if repeat_count == 1:", "= None self.current_test_method = None def _run_test(self): setattr(self, '_secret_report_storage', InnerReportConfig())", "from %s)\" % (pad_string, test_cnt, len(self.total.tests_to_execute)), color='yellow') def __print_with_format(self, msg='',", "self.ssh_pool.exec_on_host(host_ip, [cmd]) except: log_print(f'Failed to send report. 
\\n{format_exc()}', color='pink') def", "module = import_module(\"suites.%s\" % test_module) test_class_name = get_class_from_module(self.modules[test_module]['module_short_name']) test_class =", "self.modules dictionary test_module = None # == TidenTestPlan for all", "was previously decorated by @repeated_test, extract original test_names original_names =", "else: self.current_test_name = self.current_test_method def collect_test0(self): # collect test params", "# Execute test setup method self.__call_test_setup_teardown('setup') # self.__print_with_format() with Step(self,", "cause of {}'.format(skip_msg, known_issue) skip_test = True skip_no_start = True", "suites = getattr(getattr(self.test_class, self.current_test_method), '__report_suites__', None) if title: test_report.title =", "getattr(module, self.test_class_name)(self.config, self.ssh_pool) else: # for process tests - prepare", "def __init__(self): self.all_tests = {} self.skipped_tests = [] self.tests_to_execute =", "repeat_count = 1 # here, we check --to=repeated_test=N and --to=repeated_test.test_name=N", "try: for host_ip, output_lines in self.ssh_pool.exec([f\"ls {test_dir}\"]).items(): with Step(self, host_ip):", "f'-F \"file=@{file_name};filename={file_name}\" ' \\ f'{files_receiver_url}/files/add' self.ssh_pool.exec_on_host(host_ip, [cmd]) except: log_print(f'Failed to", "iteration requested test_param['repeated_test_name'] = [] else: # rare case, skip", "for i, cfg_option_name in enumerate(cfg_options) ])), color='blue') else: cfg_options =", "Update Tiden config self.config['rt'] = { 'test_class': self.test_class_name, 'test_method': None,", "fixture_name, str(e), str(traceback.format_exc())), color='red') finally: self._call_plugin_manager('after_test_class_%s' % fixture_name) return fixture_passed", "= {} self.skipped_tests = [] self.tests_to_execute = [] def update(self,", "Apache License, Version 2.0 (the \"License\"); # you may not", "args.append(self.current_test_method) 
self.pm.do(execution_point, *args) def __update_config_and_save(self, current_method_name=None): test_method = current_method_name if", "either express or implied. # See the License for the", "repeat_count test_params['repeated_test_name'] = repeated_test_name test_params['continue_on_fail'] = self.config.get('repeated_test_continue_on_fail', False) return test_params", "from %s)\" % (pad_string, test_cnt, len(tests_to_execute)), color='yellow') for self.test_iteration in", "from current test module :param test_method_names: :param common_test_param: :return: \"\"\"", "attribs = getattr(test_function, \"__attrib__\") attribs.append(str(self.current_test_method)) # if attr is passed", "% skip_message skip_test = True return skip_test, skip_msg, skip_no_start def", "' \\ f'-F \"file=@{file_name};filename={file_name}\" ' \\ f'{files_receiver_url}/files/add' self.ssh_pool.exec_on_host(host_ip, [cmd]) except:", "fixture if hasattr(test_function, \"__setup__\"): setup_fixture = getattr(test_function, \"__setup__\") if type(setup_fixture)", "traceback.format_exc() finally: if test_status != 'pass': log_print(tb_msg, color='red') log_print(\"{} {}", "test_method_name, } is_skipped, skip_msg, skip_no_start = self._check_test_for_skip() test_param.update(self.collect_test0()) repeat_count =", "prepare test directory and resources self.__create_test_module_directory(remote_test_module_dir, test_module_dir) self.test_class = getattr(module,", "automagically if that's required if self.config.get('repeated_test'): repeated_test_option = self.config['repeated_test'] re_decorate", "self.__prepare_module_vars(test_module) # find test methods: if hasattr(self.test_class, '__configurations__'): cfg_options =", "self.test_class = getattr(module, self.test_class_name)(self.config, self.ssh_pool) else: # for process tests", "'\\n'.join([ test_plan.all_tests[test_name]['test_method_name'] for test_name in tests_to_execute ])), color='blue') # Execute", "current_method_name=None): 
test_method = current_method_name if current_method_name else self.current_test_method test_method_name =", "'test_name': self.current_test_name, } test_function = getattr(self.test_class, self.current_test_method) # first setup", "self.create_test_module_attr_yaml(test_method_names) self.collect_tests0(test_method_names) self.total.update(self.test_plan[test_module]) log_print(\"*** Found %s tests. %s skipped. Going", "if that's required if self.config.get('repeated_test'): repeated_test_option = self.config['repeated_test'] re_decorate =", "all_tests[self.current_test_name].get(f'{method_name}_test_method') self.__print_with_format(msg=str(method_to_execute.__name__)) try: if all_tests[self.current_test_name].get(f'{method_name}_test_params'): method_to_execute(self.test_class) else: method_to_execute() except Exception", "import Result from .util import write_yaml_file, should_be_skipped from .logger import", "with one of the attribute # then skip it. if", "self.test_class = getattr(module, self.test_class_name) self.test_class.__init__ = fake_init self.test_class = getattr(module,", "in line.split('\\n'): if file_name and file_name.endswith('.log'): send_file_name = f'{uuid4()}_{file_name}' add_attachment(self,", "configuration = None test_method_names = list(self.gen_tests(self.test_class)) self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration,", "= None tests_to_execute = None def __init__(self): self.all_tests = {}", "\\ f'-F \"file=@{file_name};filename={file_name}\" ' \\ f'{files_receiver_url}/files/add' self.ssh_pool.exec_on_host(host_ip, [cmd]) except: log_print(f'Failed", "started = time() try: self.__print_with_format('started', current_method_name=fixture_name) self.__update_config_and_save(current_method_name=fixture_name) # Execute setup", "idx break test_report.steps[idx_to_add]['children'] = exec_report.steps + test_report.steps[idx_to_add].get('children', []) title =", "conditions_met, skip_message = 
skip_condition(self.test_class) if not conditions_met: skip_msg = 'skipped", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "test params test_params = { 'test_name': self.current_test_name, } test_function =", "for correct fail in Jenkins if not setup_passed: exit(1) def", "= TidenTestPlan() self.__prepare_module_vars(test_module) # find test methods: if hasattr(self.test_class, '__configurations__'):", "= 'fail' test_exception = e tb_msg = traceback.format_exc() except Exception", "send report. \\n{format_exc()}', color='pink') def __copy_resources_to_local_test_module_directory(self): \"\"\" Copy resources in", "set_configuration_options, get_configuration_representation, get_actual_configuration from importlib import import_module from os import", "when only one iteration requested test_param['repeated_test_name'] = [] else: #", "self.current_test_name) self.__print_found_test_method_to_execute(long_path_len, test_cnt, test_module) self.result.stop_testcase('pass') def process_tests(self): \"\"\" Run all", "test_method_name all_tests = self.test_plan[self.test_module].all_tests # cause of repeated_tests decorator if", "'test_module_dir': remote_test_module_dir, } } module = import_module(\"suites.%s\" % self.test_module) #", "self.collect_tests0(test_method_names) self.total.update(self.test_plan[test_module]) log_print(\"*** Found %s tests. %s skipped. 
Going to", "if hasattr(self.test_class, 'get_run_info') else None ) # Execute test teardown", "len(test_plan.skipped_tests) > 0: self._skip_tests() if len(test_plan.tests_to_execute) > 0: tests_to_execute =", "self.current_test_name = self.current_test_method + configuration_representation else: self.current_test_name = self.current_test_method def", "exec_report.steps + test_report.steps[idx_to_add].get('children', []) title = getattr(getattr(self.test_class, self.current_test_method), '__report_title__', None)", "test_method_name if hasattr(self.test_class, '__configurations__'): if cfg_options is None: cfg_options =", "import get_priority_key from .sshpool import SshPool from uuid import uuid4", "from .util import log_print, unix_path, call_method, create_case, kill_stalled_java, exec_time from", "= None try: self._call_plugin_manager(f'before_test_method_{method_name}') all_tests = self.test_plan[self.test_module].all_tests if all_tests[self.current_test_name].get(f'{method_name}_test_method'): method_to_execute", ".sshpool import SshPool from uuid import uuid4 from traceback import", "import * from .runner import get_test_modules, get_long_path_len, get_class_from_module, known_issue_str from", "= getattr(test_function, \"__teardown__\") teardown_method = getattr(self.test_class, teardown_fixture) test_params['teardown_test_method'] = teardown_method", "str(test_dir_name)), 'ln -sfn %s %s/current_test_directory' % (self.config['rt']['remote']['test_module_dir'], self.config['environment']['home']) ] self.ssh_pool.exec(create_remote_dir)", "= \"{}/{}/{}\".format( self.config['rt']['test_module_dir'], self.config['rt']['test_class'], test_dir_name) try: create_remote_dir = [ 'mkdir", "# # Copyright 2017-2020 GridGain Systems. 
# # Licensed under", "# test name, with all configuration options current_test_name = None", "module to prepare :param fake_init: do not init module :return:", "else: return if test_dir: try: for host_ip, output_lines in self.ssh_pool.exec([f\"ls", "collect test params test_params = { 'test_name': self.current_test_name, } test_function", "test_params['continue_on_fail'] = self.config.get('repeated_test_continue_on_fail', False) return test_params def _skip_tests(self): test_plan =", "set_configuration_options(cfg_options, self.config, configuration) self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration, 'cfg_options': cfg_options, })", "only current_test_method = None def __init__(self, config, **kwargs): if kwargs.get('modules',", "repeated_test_count = test_param.get('repeated_test_count', 1) repeated_test_continue_on_fail = test_param.get('continue_on_fail') test_with_iterations = True", "None configuration = None test_method_names = list(self.gen_tests(self.test_class)) self.collect_tests1(test_method_names, common_test_param={ 'configuration':", "'{} cause of {}'.format(skip_msg, known_issue) skip_test = True skip_no_start =", "known_issue = self.test_plan[self.test_module].all_tests[self.current_test_name].get('known_issue') setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) try: self.pm.do(\"before_test_method\", test_module=self.test_module, test_name=self.current_test_name,", "if is_skipped: test_param.update({ 'skip_msg': skip_msg, 'skip_no_start': skip_no_start, }) test_plan.skipped_tests.append(self.current_test_name) else:", "test_class = getattr(module, test_class_name)(self.config, self.ssh_pool) if hasattr(test_class, 'check_requirements'): test_class.check_requirements() for", "int(repeated_test_option) elif self.current_test_method in repeated_test_option.keys(): # otherwise re-decorate only if", "len(self.total.tests_to_execute)), color='yellow') def __print_with_format(self, msg='', 
current_method_name=''): if not current_method_name: if", "import MethodType setattr(self.test_class, self.current_test_method, MethodType(decorated_test, self.test_class)) test_function = getattr(self.test_class, self.current_test_method)", "InnerReportConfig()) def __call_module_setup_teardown(self, fixture_name): \"\"\" Execute test module setup/teardown fixture.", "get_priority_key from .sshpool import SshPool from uuid import uuid4 from", "= list(self.gen_tests(self.test_class)) self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration, 'cfg_options': cfg_options, }) test_plan", "= getattr(module, test_class_name)(self.config, self.ssh_pool) if hasattr(test_class, 'check_requirements'): test_class.check_requirements() for test_module", "# otherwise re-decorate only if test name matches given option", "attr is passed to runner and test is not marked", "\"__teardown__\"): teardown_fixture = getattr(test_function, \"__teardown__\") teardown_method = getattr(self.test_class, teardown_fixture) test_params['teardown_test_method']", "'_secret_report_storage', test_report) setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) def __call_module_setup_teardown(self, fixture_name): \"\"\" Execute", "instance of test case class resets self.all_tests, self.tests_to_execute, self.skipped_tests config", "0 # Skipped tests do not hit collect report #", "exception as we should fail the test if method_name ==", "init module :return: \"\"\" self.test_module = module_name # fill new", "symlink to current test\", color='red') self._save_config() def _check_test_for_skip(self): attribs =", "# == TidenTestPlan for all modules: total = None #", "= self.config['plugins']['WardReport'] files_receiver_url = report_config['files_url'] upload_logs = report_config['upload_logs'] else: return", "= True test_params['setup_test_method'] = setup_method # next, teardown fixture if", "a short name of test module, e.g. 
test module file", "model classes self.test_class.tiden.config = self.config self.test_class.tiden.ssh = self.ssh_pool self.test_class.config =", "name test_plan = {} # == for current test module:", "= True skip_no_start = True elif self.config.get('attrib') and should_be_skipped(self.config.get('attrib'), attribs,", "'teardown' :return: \"\"\" self._call_plugin_manager('before_test_class_%s' % fixture_name) fixture_passed = True try:", "for file in glob(\"%s/*\" % self.config['rt']['resource_dir']): if path.isfile(file): copyfile(file, f\"{test_resource_dir}/{basename(file)}\")", "\"%s/%s\" % (self.config['remote']['suite_var_dir'], self.module_short_name) self.test_class_name = get_class_from_module(self.module_short_name) # Update Tiden", "test_param.get('continue_on_fail') test_with_iterations = True if repeated_test_count > 1 else False", "= report_config['files_url'] upload_logs = report_config['upload_logs'] else: return if test_dir: try:", "return fixture_passed def _call_plugin_manager(self, execution_point): args = [self.test_module, self.test_class] if", "( len(self.total.all_tests), len(self.total.skipped_tests), len(self.total.tests_to_execute) ), color='blue') test_cnt = 0 #", "setup fixture if hasattr(test_function, \"__setup__\"): setup_fixture = getattr(test_function, \"__setup__\") if", "__create_test_module_directory(self, remote_test_module_dir, test_module_dir): mkdir(test_module_dir) self.ssh_pool.exec([f'mkdir -p {remote_test_module_dir}']) @step('{method_name}') def __call_test_setup_teardown(self,", "if upload_logs: cmd = f'cd {test_dir}; ' \\ f'curl -H", "= get_class_from_module(self.module_short_name) # Update Tiden config self.config['rt'] = { 'test_class':", "if hasattr(test_class, 'check_requirements'): test_class.check_requirements() for test_module in sorted(self.modules.keys()): # cleanup", "use this file except in compliance with the License. 
#", "None if kwargs.get('xunit_path'): xunit_path_var = kwargs.get('xunit_path') elif config.get('var_dir') and config.get('xunit_file'):", "if hasattr(self.test_class, 'tiden'): self.__copy_resources_to_local_test_module_directory() # Set ssh and config apps", "teardown_fixture) test_params['teardown_test_method'] = teardown_method # don't forget known issues if", "module' test case class test_class = None # == for", "AbstractSshPool self.ssh_pool = AbstractSshPool({'hosts': []}) def empty_init(self, config, ssh_pool): self.config", "'<suite_name>.<test_file_name>': { # 'path': <full-path-to-test-file>, # 'module_short_name': <test_file_name>, # }", "return self.test_iteration = 1 self.current_test_method = test_method_name if hasattr(self.test_class, '__configurations__'):", "of {}'.format(skip_msg, known_issue) skip_test = True skip_no_start = True elif", "test_method.split('(')[0] if '(' in test_method else test_method test_dir_name = test_method_name", "is None: configuration = get_actual_configuration(self.config, cfg_options) configuration_representation = get_configuration_representation(cfg_options, configuration)", "'repeated_test_count'): repeat_count = test_function.repeated_test_count repeated_test_name = test_function.repeated_test_name test_params['repeated_test_count'] = repeat_count", "self.current_test_method = None def _run_tests(self, tests_to_execute): test_plan = self.test_plan[self.test_module] try:", "except Exception as e: test_status = 'error' test_exception = e", "None # Tiden SshPool instance ssh_pool = None # Tiden", "class_attr in dir(test_class): if class_attr.startswith('test_'): yield class_attr def collect_tests0(self, test_method_names):", "options current_test_name = None # test method name only current_test_method", "test teardown method self.__call_test_setup_teardown('teardown') self.pm.do('after_test_method', test_status=test_status, exception=test_exception, stacktrace=tb_msg, known_issue=known_issue, 
description=getattr(self.test_class,", "and not repeated_test_continue_on_fail: self.result.update_test_name('{}_iteration_{}'.format(current_test, self.test_iteration + 1)) break finally: self.current_test_name", "self._call_plugin_manager('after_test_class_%s' % fixture_name) return fixture_passed def _call_plugin_manager(self, execution_point): args =", "), color='blue') test_cnt = 0 # Skipped tests do not", "if self.config.get('repeated_test'): repeated_test_option = self.config['repeated_test'] re_decorate = False if type({})", "files_receiver_url = report_config['files_url'] upload_logs = report_config['upload_logs'] else: return if test_dir:", "hasattr(test_function, \"__skip_conds__\") and \\ len(test_function.__skip_conds__) > 0: skip_conditions = test_function.__skip_conds__", "'__configuration_options__') configuration = get_actual_configuration(self.config, cfg_options) log_print(\"Configuration options for %s:\\n%s\" %", "%s sec' % (int(time() - started)), current_method_name=fixture_name) log_print('Exception in %s.%s.%s:", "+ configuration_representation else: self.current_test_name = self.current_test_method def collect_test0(self): # collect", "else: if common_test_param: test_param.update(common_test_param) test_plan.tests_to_execute.append(self.current_test_name) test_plan.all_tests[self.current_test_name] = test_param.copy() finally: self.current_test_method", "False) return test_params def _skip_tests(self): test_plan = self.test_plan[self.test_module] skipped_tests =", "fake_init=empty_init) test_plan = self.test_plan[self.test_module] for test_name in sorted(test_plan.tests_to_execute): test_param =", "import step, InnerReportConfig, Step, add_attachment, AttachmentType from .util import log_print,", "'ln -sfn %s %s/current_test_directory' % (self.config['rt']['remote']['test_module_dir'], self.config['environment']['home']) ] self.ssh_pool.exec(create_remote_dir) except", "None # Tiden PluginManager instance pm = None # longest", "except 
Exception: log_print(\"Can't create symlink to current test\", color='red') self._save_config()", "self.test_module, 'test_module_name': self.module_short_name, 'test_module_dir': test_module_dir, 'remote': { 'test_module_dir': remote_test_module_dir, }", "re-decorate all tests re_decorate = True repeat_count = int(repeated_test_option) elif", "else: cfg_options = getattr(self.test_class, '__configuration_options__').copy() configurations = getattr(self.test_class, '__configurations__').copy() for", "sorted(test_plan.skipped_tests) try: for current_test in skipped_tests: test_param = test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param)", "in compliance with the License. # You may obtain a", "= kwargs.get('ssh_pool') self.pm: PluginManager = kwargs.get('plugin_manager') def collect_tests(self): \"\"\" Collect", "for class_attr in dir(test_class): if class_attr.startswith('test_'): yield class_attr def collect_tests0(self,", "software # distributed under the License is distributed on an", "color='yellow') for self.test_iteration in range(repeated_test_count): if test_with_iterations: log_print(\"{} started (iteration", "sec' % (int(time() - started)), current_method_name=fixture_name) # except (AssertionError, TidenException)", "test_method_name, all_tests[test_method].get('repeated_test_name')[self.test_iteration]) self.config['rt']['test_method'] = test_method_name self.config['rt']['remote']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['remote']['test_module_dir'], self.config['rt']['test_class'],", "repeat_count = int(repeated_test_option) elif self.current_test_method in repeated_test_option.keys(): # otherwise re-decorate", "-sfn %s %s/current_test_directory' % (self.config['rt']['remote']['test_module_dir'], self.config['environment']['home']) ] self.ssh_pool.exec(create_remote_dir) except Exception:", "modules = None # Tiden config dictionary config = None", "= import_module(\"suites.%s\" % test_module) test_class_name = 
get_class_from_module(self.modules[test_module]['module_short_name']) test_class = getattr(module,", "in sorted(self.modules.keys()): module = import_module(\"suites.%s\" % test_module) test_class_name = get_class_from_module(self.modules[test_module]['module_short_name'])", "= self.config.get('repeated_test_continue_on_fail', False) return test_params def _skip_tests(self): test_plan = self.test_plan[self.test_module]", "--to=repeated_test.test_name=N options # and decorate test with @repeated_test automagically if", "= None # instance of current module' test case class", "test_param.update({ 'skip_msg': skip_msg, 'skip_no_start': skip_no_start, }) test_plan.skipped_tests.append(self.current_test_name) else: if common_test_param:", "\\ len(test_function.__skip_conds__) > 0: skip_conditions = test_function.__skip_conds__ for skip_condition in", "all_tests = self.test_plan[self.test_module].all_tests if all_tests[self.current_test_name].get(f'{method_name}_test_method'): method_to_execute = all_tests[self.current_test_name].get(f'{method_name}_test_method') self.__print_with_format(msg=str(method_to_execute.__name__)) try:", "module for all configurations :param test_method_names: :return: \"\"\" if not", "test_report: InnerReportConfig = getattr(self, '_secret_report_storage') idx_to_add = None for idx,", "self.total.update(self.test_plan[test_module]) log_print(\"*** Found %s tests. %s skipped. 
Going to 'run'", "if hasattr(test_function, \"__skipped__\"): skip_msg = 'skipped cause of %s' %", "AbstractSshPool({'hosts': []}) def empty_init(self, config, ssh_pool): self.config = config self.ssh", "module = import_module(\"suites.%s\" % self.test_module) # used for collect_only if", "= getattr(self.test_class, '__configurations__').copy() for configuration in configurations: # set configuration", "config apps model classes self.test_class.tiden.config = self.config self.test_class.tiden.ssh = self.ssh_pool", "exception=test_exception, stacktrace=tb_msg, known_issue=known_issue, description=getattr(self.test_class, self.current_test_method, lambda: None).__doc__, inner_report_config=getattr(self, '_secret_report_storage')) #", "test_params def _skip_tests(self): test_plan = self.test_plan[self.test_module] skipped_tests = sorted(test_plan.skipped_tests) try:", "self.test_plan = {} self.total = TidenTestPlan() def __prepare_module_vars(self, module_name, fake_init=None):", "skip_message = skip_condition(self.config) if not conditions_met: skip_msg = 'skipped cause", "[] self.tests_to_execute = [] def update(self, other): self.all_tests.update(other.all_tests) self.skipped_tests.extend(other.skipped_tests) self.tests_to_execute.extend(other.tests_to_execute)", "get logs\\n{traceback.format_exc()}', color='pink') # if exception in setup method then", "path.exists(test_resource_dir): mkdir(test_resource_dir) self.config['rt']['resource_dir'] = \"%s/res/%s\" % (self.config['suite_dir'], self.module_short_name[5:]) for file", "self.modules = kwargs.get('modules') else: self.modules = get_test_modules(config, collect_only=kwargs.get('collect_only')) self.config =", "join(config.get('var_dir'), config.get('xunit_file')) self.result = Result(xunit_path=xunit_path_var) self.ssh_pool: SshPool = kwargs.get('ssh_pool') self.pm:", "test_method_name self.config['rt']['remote']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['remote']['test_module_dir'], 
self.config['rt']['test_class'], test_dir_name ) self.config['rt']['test_dir'] =", "0 # instance of Result class result = None #", "Result class result = None # current test module, a", "def __call_module_setup_teardown(self, fixture_name): \"\"\" Execute test module setup/teardown fixture. :param", "= True try: if hasattr(self.test_class, fixture_name): started = time() try:", "current_test_method = None def __init__(self, config, **kwargs): if kwargs.get('modules', None)", "hasattr(self.test_class, 'get_run_info') else None ) # Execute test teardown method", "= exec_report.steps + test_report.steps[idx_to_add].get('children', []) title = getattr(getattr(self.test_class, self.current_test_method), '__report_title__',", "from .sshpool import SshPool from uuid import uuid4 from traceback", "= config self.long_path_len = get_long_path_len(self.modules) xunit_path_var = None if kwargs.get('xunit_path'):", "test was previously decorated by @repeated_test, extract original test_names original_names", "cfg_options = getattr(self.test_class, '__configuration_options__') configuration = get_actual_configuration(self.config, cfg_options) log_print(\"Configuration options", "# used for collect_only if fake_init: self.test_class = getattr(module, self.test_class_name)", "self.test_plan[self.test_module] try: for test_cnt, current_test in enumerate(tests_to_execute, start=1): test_param =", "self.current_test_method else: current_method_name = '' log_print(\"[{}][.{}.{}] {}\".format( datetime.now().isoformat()[11:-7], self.test_class_name, current_method_name,", "setup_passed: exit(1) def create_test_module_attr_yaml(self, test_method_names): # create attr.yaml for current_test_name", "cause test is MUTED' known_issue = None if hasattr(test_function, \"__known_issues__\"):", "e finally: self.__set_child_steps_to_parent() self._call_plugin_manager(f'after_test_method_{method_name}') def __set_child_steps_to_parent(self): exec_report: InnerReportConfig = 
getattr(self.test_class,", "for host_ip, output_lines in self.ssh_pool.exec([f\"ls {test_dir}\"]).items(): with Step(self, host_ip): for", "collect_tests(self): \"\"\" Collect tests from all modules. \"\"\" log_print(\"*** Collecting", "test_module_dir = \"%s/%s\" % (self.config['suite_var_dir'], self.module_short_name) remote_test_module_dir = \"%s/%s\" %", "color='yellow') test_status = self._run_test() if test_with_iterations and test_status != 'pass'", "module working local and remote directories. Copies resources from suite", "common_test_param: :return: \"\"\" try: test_plan = self.test_plan[self.test_module] for test_method_name in", "(self.test_class.__class__.__name__, '\\n'.join([ '\\t' + cfg_option_name + '=' + str( configuration[i])", "write_yaml_file(self.config['config_path'], self.config) @staticmethod def gen_tests(test_class): \"\"\" Generates all test method", "try: self._call_plugin_manager(f'before_test_method_{method_name}') all_tests = self.test_plan[self.test_module].all_tests if all_tests[self.current_test_name].get(f'{method_name}_test_method'): method_to_execute = all_tests[self.current_test_name].get(f'{method_name}_test_method')", "= all_tests[self.current_test_name].get(f'{method_name}_test_method') self.__print_with_format(msg=str(method_to_execute.__name__)) try: if all_tests[self.current_test_name].get(f'{method_name}_test_params'): method_to_execute(self.test_class) else: method_to_execute() except", "to runner and test is not marked with one of", "from uuid import uuid4 from traceback import format_exc from .runner", "self.__save_logs() log_print(f\"{pad_string} passed {exec_time(started)}\", color='green') except (AssertionError, TidenException) as e:", "in enumerate(test_report.steps): if test_step['status'] is None: idx_to_add = idx break", "\"\"\" Collect given set of tests from test module for", "in test_method_names: self.__prepare_test_vars(test_method_name, **common_test_param) test_param = { 'test_method_name': 
test_method_name, }", "with the License. # You may obtain a copy of", "datetime.now().isoformat()[11:-7], self.test_module)) def __get_pad_string(self, msg=None): return (\"%s.%s.%s \" % (", "'_secret_report_storage', InnerReportConfig()) test_exception = None tb_msg = None test_status =", "'__configurations__'): cfg_options = getattr(self.test_class, '__configuration_options__') configuration = get_actual_configuration(self.config, cfg_options) log_print(\"Configuration", "re_decorate = True repeat_count = int(repeated_test_option) elif self.current_test_method in repeated_test_option.keys():", "self.test_plan[self.test_module] skipped_tests = sorted(test_plan.skipped_tests) try: for current_test in skipped_tests: test_param", "\"__attrib__\"): attribs = getattr(test_function, \"__attrib__\") attribs.append(str(self.current_test_method)) # if attr is", "known_issue=known_issue, run_info=self.test_class.get_run_info() if hasattr(self.test_class, 'get_run_info') else None ) # Execute", "module_short_name = None # a name of module' test class", "name of module' test class test_class_name = None # instance", "check options and skip itself set_configuration_options(cfg_options, self.config, configuration) self.collect_tests1(test_method_names, common_test_param={", "Exception as e: fixture_passed = False self.__print_with_format('failed in %s sec'", "2017-2020 GridGain Systems. 
# # Licensed under the Apache License,", ".report.steps import step, InnerReportConfig, Step, add_attachment, AttachmentType from .util import", "1, repeated_test_count), color='yellow') test_status = self._run_test() if test_with_iterations and test_status", "extension module_short_name = None # a name of module' test", "module: # test name, with all configuration options current_test_name =", "iterations <= 0' skip_no_start = False if is_skipped: test_param.update({ 'skip_msg':", "join, basename from glob import glob import traceback class TidenTestPlan:", "= None def _run_test(self): setattr(self, '_secret_report_storage', InnerReportConfig()) test_exception = None", "known_issue: skip_msg = '{} cause of {}'.format(skip_msg, known_issue) skip_test =", "of module' test class test_class_name = None # instance of", "# Execute module teardown self.__call_module_setup_teardown('teardown') # this is for correct", "@step('logs') def __save_logs(self): test_dir = self.config.get('rt', {}).get('remote', {}).get('test_dir') if 'WardReport'", "color='yellow') finally: self.current_test_name = None self.current_test_method = None def _run_tests(self,", "class result = None # current test module, a key", "configuration_representation = get_configuration_representation(cfg_options, configuration) self.current_test_name = self.current_test_method + configuration_representation else:", "False pad_string = self.__get_pad_string() log_print(\"%s started (%s from %s)\" %", "method call_method(self.test_class, fixture_name) self.__print_with_format('finished in %s sec' % (int(time() -", "express or implied. # See the License for the specific", "name without .py extension module_short_name = None # a name", "except in compliance with the License. 
# You may obtain", "by default runs only once, # unless repeated_test_count set explicitly", "skip by --to=repeated_test.test_name=0 is_skipped = True skip_msg = 'skipped due", "= int(time()) known_issue = self.test_plan[self.test_module].all_tests[self.current_test_name].get('known_issue') setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) try: self.pm.do(\"before_test_method\",", "Collect tests from all modules. \"\"\" log_print(\"*** Collecting tests ***\",", "configuration = get_actual_configuration(self.config, cfg_options) log_print(\"Configuration options for %s:\\n%s\" % (self.test_class.__class__.__name__,", "rare case, skip by --to=repeated_test.test_name=0 is_skipped = True skip_msg =", "= repeat_count test_params['repeated_test_name'] = repeated_test_name test_params['continue_on_fail'] = self.config.get('repeated_test_continue_on_fail', False) return", "= self.test_plan[self.test_module] for test_name in sorted(test_plan.tests_to_execute): test_param = test_plan.all_tests[test_name] self.__prepare_test_vars(**test_param)", "else: method_to_execute() except Exception as e: log_print(f'!!! 
Exception in {method_name}", "%s.%s.%s: %s\\n%s' % (self.test_module, self.test_class_name, fixture_name, str(e), str(traceback.format_exc())), color='red') finally:", "= e tb_msg = traceback.format_exc() except Exception as e: test_status", "self.__set_child_steps_to_parent() self._call_plugin_manager(f'after_test_method_{method_name}') def __set_child_steps_to_parent(self): exec_report: InnerReportConfig = getattr(self.test_class, '_secret_report_storage', None)", "is not marked with one of the attribute # then", "self.current_test_method), '__report_suites__', None) if title: test_report.title = title test_report.suites =", "configuration=None, cfg_options=None, **kwargs): if not test_method_name: return self.test_iteration = 1", "config self.ssh = ssh_pool self.__prepare_session_vars() for test_module in sorted(self.modules.keys()): #", "def __print_with_format(self, msg='', current_method_name=''): if not current_method_name: if self.current_test_method: current_method_name", "in dir(test_class): if class_attr.startswith('test_'): yield class_attr def collect_tests0(self, test_method_names): \"\"\"", "Found %s tests in %s. %s skipped. 
Going to run", "+ cfg_option_name + '=' + str( configuration[i]) for i, cfg_option_name", "getattr(test_function, \"__setup__\") if type(setup_fixture) == type(''): setup_method = getattr(self.test_class, setup_fixture)", "self.result.skip_testcase_no_start(self.test_class, self.current_test_name, skip_message=test_param['skip_msg'], skip_no_start=test_param['skip_no_start']) self.result.update_xunit() log_print(\"%s %s\" % (pad_string, test_param['skip_msg']),", "try: self.__save_logs() except: log_print(f'Failed to get logs\\n{traceback.format_exc()}', color='pink') # if", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "= getattr(self.test_class, self.current_test_method) if hasattr(test_function, \"__attrib__\"): attribs = getattr(test_function, \"__attrib__\")", "= None if hasattr(test_function, \"__known_issues__\"): known_issue = getattr(test_function, \"__known_issues__\") if", "= True repeat_count = int(repeated_test_option[self.current_test_method]) if re_decorate: from tiden.util import", "None ) # Execute test teardown method self.__call_test_setup_teardown('teardown') self.pm.do('after_test_method', test_status=test_status,", "cause of %s' % skip_message skip_test = True return skip_test,", "log_print(f'Failed to send report. \\n{format_exc()}', color='pink') def __copy_resources_to_local_test_module_directory(self): \"\"\" Copy", "self.result def _save_config(self): write_yaml_file(self.config['config_path'], self.config) @staticmethod def gen_tests(test_class): \"\"\" Generates", "total = None # dictionary of TidenTestPlan indexed by test", "elif self.config.get('attrib') and should_be_skipped(self.config.get('attrib'), attribs, self.config.get('attr_match', 'any')): skip_msg = 'skipped", "self.ssh = ssh_pool self.__prepare_session_vars() for test_module in sorted(self.modules.keys()): # cleanup", "{ 'test_module_dir': remote_test_module_dir, } } module = import_module(\"suites.%s\" % self.test_module)", "Systems. 
# # Licensed under the Apache License, Version 2.0", "test_name in sorted(test_plan.tests_to_execute): test_param = test_plan.all_tests[test_name] self.__prepare_test_vars(**test_param) test_cnt = test_cnt", "tests from all modules. \"\"\" log_print(\"*** Collecting tests ***\", color='blue')", "CONDITIONS OF ANY KIND, either express or implied. # See", "else None ) # Execute test teardown method self.__call_test_setup_teardown('teardown') self.pm.do('after_test_method',", "vars self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module) # find test methods: if", "host_ip, output_lines in self.ssh_pool.exec([f\"ls {test_dir}\"]).items(): with Step(self, host_ip): for line", "and # limitations under the License. from .tidenpluginmanager import PluginManager", "with Step(self, host_ip): for line in output_lines: file_name: str for", "resources from suite directory to local test module working directory.", "\"\"\" Execute test module setup/teardown fixture. :param fixture_name: either 'setup'", "repeated_test(repeat_count, test_names=original_names)(original_test.__func__) else: # that's a brand new decoration decorated_test", "} } module = import_module(\"suites.%s\" % self.test_module) # used for", "{send_file_name}\" ' \\ f'-F \"file=@{file_name};filename={file_name}\" ' \\ f'{files_receiver_url}/files/add' self.ssh_pool.exec_on_host(host_ip, [cmd])", "if known_issue: skip_msg = '{} cause of {}'.format(skip_msg, known_issue) skip_test", "self.config, configuration) self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration, 'cfg_options': cfg_options, }) def", "from .priority_decorator import get_priority_key from .sshpool import SshPool from uuid", "False self.__print_with_format('failed in %s sec' % (int(time() - started)), current_method_name=fixture_name)", "%s' % skip_message skip_test = True return skip_test, skip_msg, skip_no_start", "self.__copy_resources_to_local_test_module_directory() # Set ssh and 
config apps model classes self.test_class.tiden.config", "yield class_attr def collect_tests0(self, test_method_names): \"\"\" Collect given set of", "pad_string = self.__get_pad_string() log_print(\"%s started (%s from %s)\" % (pad_string,", "test methods: if hasattr(self.test_class, '__configurations__'): cfg_options = getattr(self.test_class, '__configuration_options__') configuration", "module working directory. :param module_name: name of the module to", "1 self.current_test_method = test_method_name if hasattr(self.test_class, '__configurations__'): if cfg_options is", "{} self.skipped_tests = [] self.tests_to_execute = [] def update(self, other):", "file_name.endswith('.log'): send_file_name = f'{uuid4()}_{file_name}' add_attachment(self, file_name, send_file_name, AttachmentType.FILE) if upload_logs:", "'(' in test_method else test_method test_dir_name = test_method_name all_tests =", "1 self.result.start_testcase(self.test_class, self.current_test_name) self.__print_found_test_method_to_execute(long_path_len, test_cnt, test_module) self.result.stop_testcase('pass') def process_tests(self): \"\"\"", "<full-path-to-test-file>, # 'module_short_name': <test_file_name>, # } # } modules =", "setattr(self.test_class, '_secret_report_storage', InnerReportConfig()) def __call_module_setup_teardown(self, fixture_name): \"\"\" Execute test module", "***\\n%s\" % ( len(test_plan.all_tests), self.test_class_name, len(test_plan.skipped_tests), len(test_plan.tests_to_execute), '\\n'.join([ test_plan.all_tests[test_name]['test_method_name'] for", "current test within module: # test name, with all configuration", "sorted(test_plan.tests_to_execute): test_param = test_plan.all_tests[test_name] self.__prepare_test_vars(**test_param) test_cnt = test_cnt + 1", "self.current_test_method) if hasattr(test_function, 'repeated_test_count'): repeat_count = test_function.repeated_test_count repeated_test_name = test_function.repeated_test_name", "self.current_test_name) pad_string = 
method_long_name.ljust(long_path_len, '.') log_print(\"%s found (%s from %s)\"", "method_to_execute() except Exception as e: log_print(f'!!! Exception in {method_name} code", "SshPool = kwargs.get('ssh_pool') self.pm: PluginManager = kwargs.get('plugin_manager') def collect_tests(self): \"\"\"", "self.current_test_method: args.append(self.current_test_method) self.pm.do(execution_point, *args) def __update_config_and_save(self, current_method_name=None): test_method = current_method_name", "class TidenRunner: # { # '<suite_name>.<test_file_name>': { # 'path': <full-path-to-test-file>,", "= getattr(self.test_class, '__configuration_options__').copy() configurations = getattr(self.test_class, '__configurations__').copy() for configuration in", "None def _run_test(self): setattr(self, '_secret_report_storage', InnerReportConfig()) test_exception = None tb_msg", "Exception as e: test_status = 'error' test_exception = e tb_msg", "if self.current_test_method: args.append(self.current_test_method) self.pm.do(execution_point, *args) def __update_config_and_save(self, current_method_name=None): test_method =", "from importlib import import_module from os import path, mkdir from", "test_dir_name = test_method_name all_tests = self.test_plan[self.test_module].all_tests # cause of repeated_tests", "for process tests - prepare test directory and resources self.__create_test_module_directory(remote_test_module_dir,", "test_resource_dir = \"%s/res\" % self.config['rt']['test_module_dir'] if not path.exists(test_resource_dir): mkdir(test_resource_dir) self.config['rt']['resource_dir']", "InnerReportConfig = getattr(self.test_class, '_secret_report_storage', None) test_report: InnerReportConfig = getattr(self, '_secret_report_storage')", "fixture if hasattr(test_function, \"__teardown__\"): teardown_fixture = getattr(test_function, \"__teardown__\") teardown_method =", "**kwargs): if not test_method_name: return self.test_iteration = 1 self.current_test_method =", "process if 
teardown function didn't kill nodes if not hasattr(self.test_class,", "\"\"\" self.test_module = module_name # fill new module vars self.module_short_name", "= getattr(test_function, \"__skip_cond__\") conditions_met, skip_message = skip_condition(self.config) if not conditions_met:", "passed {exec_time(started)}\", color='green') except (AssertionError, TidenException) as e: test_status =", "only if test name matches given option re_decorate = True", "test_param = { 'test_method_name': test_method_name, } is_skipped, skip_msg, skip_no_start =", "if configuration is None: configuration = get_actual_configuration(self.config, cfg_options) configuration_representation =", "# collect test params test_params = { 'test_name': self.current_test_name, }", "with @repeated_test automagically if that's required if self.config.get('repeated_test'): repeated_test_option =", "= [ 'mkdir -p %s/%s/%s' % (self.config['rt']['remote']['test_module_dir'], self.test_class_name, str(test_dir_name)), 'ln", "hit collect report # Now generate results for 'executed' tests", "test_plan = self.test_plan[self.test_module] try: for test_cnt, current_test in enumerate(tests_to_execute, start=1):", "= None # dictionary of TidenTestPlan indexed by test module", "current_test_name) create_case(test_function) def __prepare_session_vars(self): self.test_plan = {} self.total = TidenTestPlan()", "common_test_param={}): \"\"\" Collect given tests from current test module :param", "test_cnt, len(tests_to_execute)), color='yellow') for self.test_iteration in range(repeated_test_count): if test_with_iterations: log_print(\"{}", "from given configuration to Tiden config, # so that test", "directory to local test module working directory. 
:param module_name: name", "not hasattr(self.test_class, 'keep_ignite_between_tests'): kill_stalled_java(self.ssh_pool) return test_status @step('logs') def __save_logs(self): test_dir", "known_issue # test by default runs only once, # unless", "__print_found_test_method_to_execute(self, long_path_len, test_cnt, test_module): method_long_name = \"%s.%s.%s \" % (test_module,", "' \\ f'curl -H \"filename: {send_file_name}\" ' \\ f'-F \"file=@{file_name};filename={file_name}\"", "def __prepare_session_vars(self): self.test_plan = {} self.total = TidenTestPlan() def __prepare_module_vars(self,", "== type(''): setup_method = getattr(self.test_class, setup_fixture) else: setup_method = setup_fixture", "the exception as we should fail the test if method_name", "SshPool from uuid import uuid4 from traceback import format_exc from", "process_tests(self): \"\"\" Run all tests :return: \"\"\" log_print(\"*** Tests ***\",", "the attribute # then skip it. if 'mute' in attribs:", ".logger import * from .runner import get_test_modules, get_long_path_len, get_class_from_module, known_issue_str", "the module to prepare :param fake_init: do not init module", "tiden.util import repeated_test original_test = test_function if hasattr(original_test, 'repeated_test_name'): #", "self.config['rt']['test_resource_dir'] = unix_path(test_resource_dir) def __create_test_module_directory(self, remote_test_module_dir, test_module_dir): mkdir(test_module_dir) self.ssh_pool.exec([f'mkdir -p", "setup_method = setup_fixture test_params['setup_test_params'] = True test_params['setup_test_method'] = setup_method #", "to run %s tests ***\\n%s\" % ( len(test_plan.all_tests), self.test_class_name, len(test_plan.skipped_tests),", "test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) pad_string = self.__get_pad_string(msg=self.current_test_method) self.result.skip_testcase_no_start(self.test_class, self.current_test_name, skip_message=test_param['skip_msg'], 
skip_no_start=test_param['skip_no_start']) self.result.update_xunit()", "= skip_condition(self.config) if not conditions_met: skip_msg = 'skipped cause of", "# except (AssertionError, TidenException) as e: except Exception as e:", "tests from current test module :param test_method_names: :param common_test_param: :return:", "test_class.check_requirements() for test_module in sorted(self.modules.keys()): # cleanup instance vars self.test_plan[test_module]", "proper full name of module under 'suites' directory sets up", "skip_msg = 'skipped cause of %s' % test_function.__skipped_message__ skip_test =", "getattr(self.test_class, self.current_test_method) if hasattr(test_function, \"__attrib__\"): attribs = getattr(test_function, \"__attrib__\") attribs.append(str(self.current_test_method))", "test_method_name: return self.test_iteration = 1 self.current_test_method = test_method_name if hasattr(self.test_class,", "> 0: if repeat_count == 1: # don't rename tests", "cfg_options, }) def collect_tests1(self, test_method_names, common_test_param={}): \"\"\" Collect given tests", "= getattr(test_function, \"__known_issues__\") if known_issue: skip_msg = '{} cause of", "return skip_test, skip_msg, skip_no_start def get_tests_results(self): return self.result def _save_config(self):", "attr.yaml for current_test_name in test_method_names: test_function = getattr(self.test_class, current_test_name) create_case(test_function)", "test name, with all configuration options current_test_name = None #", "long_path_len = get_long_path_len(self.modules) from tiden.sshpool import AbstractSshPool self.ssh_pool = AbstractSshPool({'hosts':", "re-raise the exception as we should fail the test if", "method_to_execute(self.test_class) else: method_to_execute() except Exception as e: log_print(f'!!! Exception in", "Creates test module working local and remote directories. 
Copies resources", "update(self, other): self.all_tests.update(other.all_tests) self.skipped_tests.extend(other.skipped_tests) self.tests_to_execute.extend(other.tests_to_execute) class TidenRunner: # { #", "Execute setup or teardown method call_method(self.test_class, fixture_name) self.__print_with_format('finished in %s", "( datetime.now().isoformat()[11:-7], self.test_module)) def __get_pad_string(self, msg=None): return (\"%s.%s.%s \" %", "str for file_name in line.split('\\n'): if file_name and file_name.endswith('.log'): send_file_name", "self.config['rt']['test_method'] = test_method_name self.config['rt']['remote']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['remote']['test_module_dir'], self.config['rt']['test_class'], test_dir_name )", "should fail the test if method_name == 'setup': raise e", "if setup_passed: self._run_tests(tests_to_execute) # Execute module teardown self.__call_module_setup_teardown('teardown') # this", "time() try: self.__print_with_format('started', current_method_name=fixture_name) self.__update_config_and_save(current_method_name=fixture_name) # Execute setup or teardown", "None if hasattr(test_function, \"__known_issues__\"): known_issue = getattr(test_function, \"__known_issues__\") if known_issue:", "test_params['setup_test_method'] = setup_method # next, teardown fixture if hasattr(test_function, \"__teardown__\"):", "test_method_names: :return: \"\"\" if not hasattr(self.test_class, '__configurations__'): self.collect_tests1(test_method_names) else: cfg_options", "test directory and resources self.__create_test_module_directory(remote_test_module_dir, test_module_dir) self.test_class = getattr(module, self.test_class_name)(self.config,", "test_function.__skipped_message__ skip_test = True if hasattr(test_function, \"__skip_cond__\"): skip_condition = getattr(test_function,", "# here, we check --to=repeated_test=N and --to=repeated_test.test_name=N options # and", "- started)), current_method_name=fixture_name) 
log_print('Exception in %s.%s.%s: %s\\n%s' % (self.test_module, self.test_class_name,", "method self.__call_test_setup_teardown('setup') # self.__print_with_format() with Step(self, 'Execution'): try: call_method(self.test_class, self.current_test_method)", "self.__print_found_test_method_to_execute(long_path_len, test_cnt, test_module) self.result.stop_testcase('pass') def process_tests(self): \"\"\" Run all tests", "module setup setup_passed = self.__call_module_setup_teardown('setup') if setup_passed: self._run_tests(tests_to_execute) # Execute", "or 'teardown' :return: \"\"\" self._call_plugin_manager('before_test_class_%s' % fixture_name) fixture_passed = True", "None skipped_tests = None tests_to_execute = None def __init__(self): self.all_tests", "Skipped tests do not hit collect report # Now generate", "\"{}/{}/{}\".format( self.config['rt']['test_module_dir'], self.config['rt']['test_class'], test_dir_name) try: create_remote_dir = [ 'mkdir -p", "def __init__(self, config, **kwargs): if kwargs.get('modules', None) is not None:", "%s/current_test_directory' % (self.config['rt']['remote']['test_module_dir'], self.config['environment']['home']) ] self.ssh_pool.exec(create_remote_dir) except Exception: log_print(\"Can't create", "for test_module in sorted(self.modules.keys()): module = import_module(\"suites.%s\" % test_module) test_class_name", "# then skip it. 
if 'mute' in attribs: skip_msg =", "of the module to prepare :param fake_init: do not init", "= test_plan.all_tests[test_name] self.__prepare_test_vars(**test_param) test_cnt = test_cnt + 1 self.result.start_testcase(self.test_class, self.current_test_name)", "shutil import copyfile from os.path import join, basename from glob", "= '' log_print(\"[{}][.{}.{}] {}\".format( datetime.now().isoformat()[11:-7], self.test_class_name, current_method_name, msg)) def __print_current_module_name(self):", "'test_module_name': self.module_short_name, 'test_module_dir': test_module_dir, 'remote': { 'test_module_dir': remote_test_module_dir, } }", "xunit_path_var = join(config.get('var_dir'), config.get('xunit_file')) self.result = Result(xunit_path=xunit_path_var) self.ssh_pool: SshPool =", "in self.config.get('plugins', []): report_config = self.config['plugins']['WardReport'] files_receiver_url = report_config['files_url'] upload_logs", "cmd = f'cd {test_dir}; ' \\ f'curl -H \"filename: {send_file_name}\"", "of current module' test case class test_class = None #", "get_configuration_representation, get_actual_configuration from importlib import import_module from os import path,", "forget known issues if hasattr(test_function, \"__known_issues__\"): known_issue = getattr(test_function, \"__known_issues__\")", "getattr(self.test_class, self.current_test_method) # first setup fixture if hasattr(test_function, \"__setup__\"): setup_fixture", "Tiden SshPool instance ssh_pool = None # Tiden PluginManager instance", "AttachmentType from .util import log_print, unix_path, call_method, create_case, kill_stalled_java, exec_time", "getattr(self.test_class, '__configurations__').copy() for configuration in configurations: # set configuration options", "= 0 # Skipped tests do not hit collect report", "# } modules = None # Tiden config dictionary config", "None test_status = 'pass' pad_string = self.__get_pad_string() started = int(time())", "by test module name test_plan = {} # == for", 
"test_params['repeated_test_count'] = repeat_count test_params['repeated_test_name'] = repeated_test_name test_params['continue_on_fail'] = self.config.get('repeated_test_continue_on_fail', False)", "self.current_test_method: current_method_name = self.current_test_method else: current_method_name = '' log_print(\"[{}][.{}.{}] {}\".format(", "test_function = getattr(self.test_class, current_test_name) create_case(test_function) def __prepare_session_vars(self): self.test_plan = {}", "teardown method call_method(self.test_class, fixture_name) self.__print_with_format('finished in %s sec' % (int(time()", "self._call_plugin_manager(f'after_test_method_{method_name}') def __set_child_steps_to_parent(self): exec_report: InnerReportConfig = getattr(self.test_class, '_secret_report_storage', None) test_report:", "find test methods: if hasattr(self.test_class, '__configurations__'): cfg_options = getattr(self.test_class, '__configuration_options__')", "# next, teardown fixture if hasattr(test_function, \"__teardown__\"): teardown_fixture = getattr(test_function,", "= kwargs.get('xunit_path') elif config.get('var_dir') and config.get('xunit_file'): xunit_path_var = join(config.get('var_dir'), config.get('xunit_file'))", "# if attr is passed to runner and test is", "if option was given as --to=repeated_test=N, re-decorate all tests re_decorate", "to Tiden config, # so that test can check options", "cfg_options) log_print(\"Configuration options for %s:\\n%s\" % (self.test_class.__class__.__name__, '\\n'.join([ '\\t' +", "that's required if self.config.get('repeated_test'): repeated_test_option = self.config['repeated_test'] re_decorate = False", "def __update_config_and_save(self, current_method_name=None): test_method = current_method_name if current_method_name else self.current_test_method", "test_class_name)(self.config, self.ssh_pool) if hasattr(test_class, 'check_requirements'): test_class.check_requirements() for test_module in sorted(self.modules.keys()):", 
"dictionary config = None # Tiden SshPool instance ssh_pool =", "{}{}\".format(pad_string, test_status, exec_time(started), known_issue_str(known_issue)), color='red') self.result.stop_testcase( test_status, e=test_exception, tb=tb_msg, known_issue=known_issue,", "exec_time(started), known_issue_str(known_issue)), color='red') self.result.stop_testcase( test_status, e=test_exception, tb=tb_msg, known_issue=known_issue, run_info=self.test_class.get_run_info() if", "test module for all configurations :param test_method_names: :return: \"\"\" if", "#!/usr/bin/env python3 # # Copyright 2017-2020 GridGain Systems. # #", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "used for collect_only if fake_init: self.test_class = getattr(module, self.test_class_name) self.test_class.__init__", "attribs, self.config.get('attr_match', 'any')): skip_msg = 'skipped cause of attrib mismatch'", "- started)), current_method_name=fixture_name) # except (AssertionError, TidenException) as e: except", "= test_method_name if hasattr(self.test_class, '__configurations__'): if cfg_options is None: cfg_options", "[]}) def empty_init(self, config, ssh_pool): self.config = config self.ssh =", "teardown_method # don't forget known issues if hasattr(test_function, \"__known_issues__\"): known_issue", "cfg_options = getattr(self.test_class, '__configuration_options__').copy() configurations = getattr(self.test_class, '__configurations__').copy() for configuration", "True try: if hasattr(self.test_class, fixture_name): started = time() try: self.__print_with_format('started',", "kwargs.get('modules', None) is not None: self.modules = kwargs.get('modules') else: self.modules", "attrib mismatch' skip_test = True skip_no_start = True if hasattr(test_function,", ":return: \"\"\" log_print(\"*** Tests ***\", color='blue') self.__prepare_session_vars() # Check requirements", "test_plan.all_tests[test_name]['test_method_name'] for test_name in tests_to_execute ])), 
color='blue') # Execute module", "== for current test within module: # test name, with", "self.module_short_name self.test_class - creates instance of test case class resets", "configuration = get_actual_configuration(self.config, cfg_options) configuration_representation = get_configuration_representation(cfg_options, configuration) self.current_test_name =", "test_cnt + 1 self.result.start_testcase(self.test_class, self.current_test_name) self.__print_found_test_method_to_execute(long_path_len, test_cnt, test_module) self.result.stop_testcase('pass') def", "repeated_test original_test = test_function if hasattr(original_test, 'repeated_test_name'): # that test", "= getattr(module, self.test_class_name)(self.config, self.ssh_pool) else: # for process tests -", "self.ssh_pool) if hasattr(self.test_class, 'tiden'): self.__copy_resources_to_local_test_module_directory() # Set ssh and config", "per-module initialization of internal variables: Expects self.test_module be set to", "known_issue_str from .priority_decorator import get_priority_key from .sshpool import SshPool from", "kill_stalled_java(self.ssh_pool) return test_status @step('logs') def __save_logs(self): test_dir = self.config.get('rt', {}).get('remote',", "self.test_class_name, current_method_name, msg)) def __print_current_module_name(self): log_print(\"[%s][%s]\" % ( datetime.now().isoformat()[11:-7], self.test_module))", "None def _run_tests(self, tests_to_execute): test_plan = self.test_plan[self.test_module] try: for test_cnt,", "class :param test_class: :return: \"\"\" for class_attr in dir(test_class): if", "teardown self.__call_module_setup_teardown('teardown') # this is for correct fail in Jenkins", "color='blue') # Execute module setup setup_passed = self.__call_module_setup_teardown('setup') if setup_passed:", "{})) self.result.start_testcase(self.test_class, self.current_test_name) self.__update_config_and_save(current_method_name=self.current_test_name) # Execute test setup method 
self.__call_test_setup_teardown('setup')", "= report_config['upload_logs'] else: return if test_dir: try: for host_ip, output_lines", "getattr(self.test_class, '__configuration_options__') if configuration is None: configuration = get_actual_configuration(self.config, cfg_options)", "all test method of given test class :param test_class: :return:", "= self.config self.test_class.tiden.ssh = self.ssh_pool self.test_class.config = self.config self.test_class.ssh =", "'suites' directory sets up self.test_class_name self.module_short_name self.test_class - creates instance", "= self.current_test_method else: current_method_name = '' log_print(\"[{}][.{}.{}] {}\".format( datetime.now().isoformat()[11:-7], self.test_class_name,", "GridGain Systems. # # Licensed under the Apache License, Version", "len(self.total.all_tests), len(self.total.skipped_tests), len(self.total.tests_to_execute) ), color='blue') test_cnt = 0 # Skipped", "Version 2.0 (the \"License\"); # you may not use this", "method then re-raise the exception as we should fail the", "skip_no_start = True elif self.config.get('attrib') and should_be_skipped(self.config.get('attrib'), attribs, self.config.get('attr_match', 'any')):", "test_param.get('repeated_test_count', 1) repeated_test_continue_on_fail = test_param.get('continue_on_fail') test_with_iterations = True if repeated_test_count", "repeated_test_count set explicitly by decorator or framework option repeat_count =", "started (%s from %s)\" % (pad_string, test_cnt, len(tests_to_execute)), color='yellow') for", "self.all_tests = {} self.skipped_tests = [] self.tests_to_execute = [] def", "to local test module working directory. 
:param module_name: name of", "__update_config_and_save(self, current_method_name=None): test_method = current_method_name if current_method_name else self.current_test_method test_method_name", "None) is not None: self.modules = kwargs.get('modules') else: self.modules =", "directory sets up self.test_class_name self.module_short_name self.test_class - creates instance of", "'skipped cause of attrib mismatch' skip_test = True skip_no_start =", "f'{uuid4()}_{file_name}' add_attachment(self, file_name, send_file_name, AttachmentType.FILE) if upload_logs: cmd = f'cd", "kwargs.get('xunit_path') elif config.get('var_dir') and config.get('xunit_file'): xunit_path_var = join(config.get('var_dir'), config.get('xunit_file')) self.result", "by applicable law or agreed to in writing, software #", "required if self.config.get('repeated_test'): repeated_test_option = self.config['repeated_test'] re_decorate = False if", "under 'suites' directory sets up self.test_class_name self.module_short_name self.test_class - creates", "cause of %s' % skip_message skip_test = True if hasattr(test_function,", "local test module working directory. 
:param module_name: name of the", "skip_condition(self.config) if not conditions_met: skip_msg = 'skipped cause of %s'", "len(test_plan.all_tests), self.test_class_name, len(test_plan.skipped_tests), len(test_plan.tests_to_execute), '\\n'.join([ test_plan.all_tests[test_name]['test_method_name'] for test_name in tests_to_execute", "= traceback.format_exc() finally: if test_status != 'pass': log_print(tb_msg, color='red') log_print(\"{}", "\"\"\" if not hasattr(self.test_class, '__configurations__'): self.collect_tests1(test_method_names) else: cfg_options = getattr(self.test_class,", "import format_exc from .runner import set_configuration_options, get_configuration_representation, get_actual_configuration from importlib", "**common_test_param) test_param = { 'test_method_name': test_method_name, } is_skipped, skip_msg, skip_no_start", "sorted(self.modules.keys()): # cleanup instance vars self.test_plan[test_module] = TidenTestPlan() self.__prepare_module_vars(test_module, fake_init=empty_init)", "a name of module' test class test_class_name = None #", "TidenException) as e: test_status = 'fail' test_exception = e tb_msg", "fixture_passed def _call_plugin_manager(self, execution_point): args = [self.test_module, self.test_class] if self.current_test_method:", "test_plan.tests_to_execute.append(self.current_test_name) test_plan.all_tests[self.current_test_name] = test_param.copy() finally: self.current_test_method = None self.current_test_name =", "instance ssh_pool = None # Tiden PluginManager instance pm =", "of test case class resets self.all_tests, self.tests_to_execute, self.skipped_tests config fills", "create_case, kill_stalled_java, exec_time from .result import Result from .util import", "color='red') log_print(traceback.format_exc()) try: self.__save_logs() except: log_print(f'Failed to get logs\\n{traceback.format_exc()}', color='pink')", "test\", color='red') self._save_config() def _check_test_for_skip(self): attribs = [] skip_test =", 
"self.test_module, self.test_class_name, msg if msg else self.current_test_method)) \\ .ljust(self.long_path_len, '.')", "for applications for test_module in sorted(self.modules.keys()): module = import_module(\"suites.%s\" %", "list(self.gen_tests(self.test_class)) self.collect_tests1(test_method_names, common_test_param={ 'configuration': configuration, 'cfg_options': cfg_options, }) test_plan =", "= test_method_name self.config['rt']['remote']['test_dir'] = \"{}/{}/{}\".format( self.config['rt']['remote']['test_module_dir'], self.config['rt']['test_class'], test_dir_name ) self.config['rt']['test_dir']", "'Execution'): try: call_method(self.test_class, self.current_test_method) finally: self.__set_child_steps_to_parent() self.__save_logs() log_print(f\"{pad_string} passed {exec_time(started)}\",", "'__report_suites__', None) if title: test_report.title = title test_report.suites = suites", "self.test_class_name)(self.config, self.ssh_pool) else: # for process tests - prepare test", "report_config['files_url'] upload_logs = report_config['upload_logs'] else: return if test_dir: try: for", "test_function if hasattr(original_test, 'repeated_test_name'): # that test was previously decorated", "= None for idx, test_step in enumerate(test_report.steps): if test_step['status'] is", "fixture_name) fixture_passed = True try: if hasattr(self.test_class, fixture_name): started =", "\"\"\" test_resource_dir = \"%s/res\" % self.config['rt']['test_module_dir'] if not path.exists(test_resource_dir): mkdir(test_resource_dir)", "Execute test teardown method self.__call_test_setup_teardown('teardown') self.pm.do('after_test_method', test_status=test_status, exception=test_exception, stacktrace=tb_msg, known_issue=known_issue,", "not current_method_name: if self.current_test_method: current_method_name = self.current_test_method else: current_method_name =", "test_status=test_status, exception=test_exception, stacktrace=tb_msg, known_issue=known_issue, 
description=getattr(self.test_class, self.current_test_method, lambda: None).__doc__, inner_report_config=getattr(self, '_secret_report_storage'))", "self.current_test_method test_method_name = test_method.split('(')[0] if '(' in test_method else test_method", "%s sec' % (int(time() - started)), current_method_name=fixture_name) # except (AssertionError,", "= traceback.format_exc() except Exception as e: test_status = 'error' test_exception", "test_param = test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) pad_string = self.__get_pad_string(msg=self.current_test_method) self.result.skip_testcase_no_start(self.test_class, self.current_test_name, skip_message=test_param['skip_msg'],", "%s:\\n%s\" % (self.test_class.__class__.__name__, '\\n'.join([ '\\t' + cfg_option_name + '=' +", "= self.test_plan[self.test_module].all_tests # cause of repeated_tests decorator if all_tests.get(test_method) and", "= None configuration = None test_method_names = list(self.gen_tests(self.test_class)) self.collect_tests1(test_method_names, common_test_param={", "True test_params['setup_test_method'] = setup_method # next, teardown fixture if hasattr(test_function,", "test_params['teardown_test_method'] = teardown_method # don't forget known issues if hasattr(test_function,", "for all configurations :param test_method_names: :return: \"\"\" if not hasattr(self.test_class,", "types import MethodType setattr(self.test_class, self.current_test_method, MethodType(decorated_test, self.test_class)) test_function = getattr(self.test_class,", "True if hasattr(test_function, \"__skipped__\"): skip_msg = 'skipped cause of %s'", "test_function = getattr(self.test_class, self.current_test_method) if hasattr(test_function, 'repeated_test_count'): repeat_count = test_function.repeated_test_count", "applicable law or agreed to in writing, software # distributed", "if fake_init: self.test_class = getattr(module, self.test_class_name) self.test_class.__init__ = fake_init 
self.test_class", "import glob import traceback class TidenTestPlan: all_tests = None skipped_tests", "collect report # Now generate results for 'executed' tests for", "module_name: name of the module to prepare :param fake_init: do", "output_lines: file_name: str for file_name in line.split('\\n'): if file_name and", "module: # a short name of test module, e.g. test", "and \\ len(test_function.__skip_conds__) > 0: skip_conditions = test_function.__skip_conds__ for skip_condition", "[self.test_module, self.test_class] if self.current_test_method: args.append(self.current_test_method) self.pm.do(execution_point, *args) def __update_config_and_save(self, current_method_name=None):", "file_name in line.split('\\n'): if file_name and file_name.endswith('.log'): send_file_name = f'{uuid4()}_{file_name}'", "test_dir_name) try: create_remote_dir = [ 'mkdir -p %s/%s/%s' % (self.config['rt']['remote']['test_module_dir'],", "def __print_current_module_name(self): log_print(\"[%s][%s]\" % ( datetime.now().isoformat()[11:-7], self.test_module)) def __get_pad_string(self, msg=None):", "configuration is None: configuration = get_actual_configuration(self.config, cfg_options) configuration_representation = get_configuration_representation(cfg_options,", "args = [self.test_module, self.test_class] if self.current_test_method: args.append(self.current_test_method) self.pm.do(execution_point, *args) def", "% (self.config['remote']['suite_var_dir'], self.module_short_name) self.test_class_name = get_class_from_module(self.module_short_name) # Update Tiden config", "tests_to_execute ])), color='blue') # Execute module setup setup_passed = self.__call_module_setup_teardown('setup')", "{ 'test_name': self.current_test_name, } test_function = getattr(self.test_class, self.current_test_method) # first", "= test_function if hasattr(original_test, 'repeated_test_name'): # that test was previously", "= test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) 
repeated_test_count = test_param.get('repeated_test_count', 1) repeated_test_continue_on_fail = test_param.get('continue_on_fail')", "Generates all test method of given test class :param test_class:", "} module = import_module(\"suites.%s\" % self.test_module) # used for collect_only", "TidenTestPlan() def __prepare_module_vars(self, module_name, fake_init=None): \"\"\" Prepare per-module initialization of", "Expects self.test_module be set to proper full name of module", "self.config.get('plugins', []): report_config = self.config['plugins']['WardReport'] files_receiver_url = report_config['files_url'] upload_logs =", "log_print('Exception in %s.%s.%s: %s\\n%s' % (self.test_module, self.test_class_name, fixture_name, str(e), str(traceback.format_exc())),", "self.current_test_method = test_method_name if hasattr(self.test_class, '__configurations__'): if cfg_options is None:", "skip_condition in skip_conditions: conditions_met, skip_message = skip_condition(self.test_class) if not conditions_met:", "return test_params def _skip_tests(self): test_plan = self.test_plan[self.test_module] skipped_tests = sorted(test_plan.skipped_tests)", "method self.__call_test_setup_teardown('teardown') self.pm.do('after_test_method', test_status=test_status, exception=test_exception, stacktrace=tb_msg, known_issue=known_issue, description=getattr(self.test_class, self.current_test_method, lambda:", "= False if is_skipped: test_param.update({ 'skip_msg': skip_msg, 'skip_no_start': skip_no_start, })", "of the test name long_path_len = 0 # instance of", "of a test class from types import MethodType setattr(self.test_class, self.current_test_method,", "# You may obtain a copy of the License at", "0: if repeat_count == 1: # don't rename tests when", "skip_condition = getattr(test_function, \"__skip_cond__\") conditions_met, skip_message = skip_condition(self.config) if not", "previously decorated by @repeated_test, extract original test_names original_names = 
original_test.repeated_test_name", "= original_test.repeated_test_name decorated_test = repeated_test(repeat_count, test_names=original_names)(original_test.__func__) else: # that's a", "code !!!', color='red') log_print(traceback.format_exc()) try: self.__save_logs() except: log_print(f'Failed to get", "_run_tests(self, tests_to_execute): test_plan = self.test_plan[self.test_module] try: for test_cnt, current_test in", "remote_test_module_dir, test_module_dir): mkdir(test_module_dir) self.ssh_pool.exec([f'mkdir -p {remote_test_module_dir}']) @step('{method_name}') def __call_test_setup_teardown(self, method_name):", "None self.current_test_method = None def _run_test(self): setattr(self, '_secret_report_storage', InnerReportConfig()) test_exception", "skip_test = True skip_no_start = True elif self.config.get('attrib') and should_be_skipped(self.config.get('attrib'),", "return test_status @step('logs') def __save_logs(self): test_dir = self.config.get('rt', {}).get('remote', {}).get('test_dir')", "get_configuration_representation(cfg_options, configuration) self.current_test_name = self.current_test_method + configuration_representation else: self.current_test_name =", "% (pad_string, test_cnt, len(tests_to_execute)), color='yellow') for self.test_iteration in range(repeated_test_count): if", "tests re_decorate = True repeat_count = int(repeated_test_option) elif self.current_test_method in", "for current_test in skipped_tests: test_param = test_plan.all_tests[current_test] self.__prepare_test_vars(**test_param) pad_string =", "send_file_name = f'{uuid4()}_{file_name}' add_attachment(self, file_name, send_file_name, AttachmentType.FILE) if upload_logs: cmd", "None).__doc__, inner_report_config=getattr(self, '_secret_report_storage')) # Kill java process if teardown function", "= None # == for current test within module: #", "__init__(self): self.all_tests = {} self.skipped_tests = [] self.tests_to_execute = []", "Copies resources from suite directory to local test 
module working", "self.config self.test_class.ssh = self.ssh_pool self._save_config() def __prepare_test_vars(self, test_method_name=None, configuration=None, cfg_options=None,", "getattr(self.test_class, current_test_name) create_case(test_function) def __prepare_session_vars(self): self.test_plan = {} self.total =", "for current_test_name in test_method_names: test_function = getattr(self.test_class, current_test_name) create_case(test_function) def", "e: test_status = 'fail' test_exception = e tb_msg = traceback.format_exc()", "= None skipped_tests = None tests_to_execute = None def __init__(self):", "def collect_test0(self): # collect test params test_params = { 'test_name':", "apps model classes self.test_class.tiden.config = self.config self.test_class.tiden.ssh = self.ssh_pool self.test_class.config", "= self.config['repeated_test'] re_decorate = False if type({}) != type(repeated_test_option): #", "if class_attr.startswith('test_'): yield class_attr def collect_tests0(self, test_method_names): \"\"\" Collect given", "self.test_iteration in range(repeated_test_count): if test_with_iterations: log_print(\"{} started (iteration {} from", "getattr(self.test_class, setup_fixture) else: setup_method = setup_fixture test_params['setup_test_params'] = True test_params['setup_test_method']", "= get_test_modules(config, collect_only=kwargs.get('collect_only')) self.config = config self.long_path_len = get_long_path_len(self.modules) xunit_path_var", "idx_to_add = None for idx, test_step in enumerate(test_report.steps): if test_step['status']", "description=getattr(self.test_class, self.current_test_method, lambda: None).__doc__, inner_report_config=getattr(self, '_secret_report_storage')) # Kill java process", "# this magic required to convert decorated test function to", "skip_message skip_test = True if hasattr(test_function, \"__skip_conds__\") and \\ len(test_function.__skip_conds__)", "\"%s.%s.%s \" % (test_module, self.test_class_name, self.current_test_name) 
pad_string = method_long_name.ljust(long_path_len, '.')", "None def __init__(self, config, **kwargs): if kwargs.get('modules', None) is not", "'module_short_name': <test_file_name>, # } # } modules = None #", "= getattr(self.test_class, self.current_test_method) # first setup fixture if hasattr(test_function, \"__setup__\"):", "'check_requirements'): test_class.check_requirements() for test_module in sorted(self.modules.keys()): # cleanup instance vars", "log_print, unix_path, call_method, create_case, kill_stalled_java, exec_time from .result import Result", "log_print(\"{} started (iteration {} from {})\".format(pad_string, self.test_iteration + 1, repeated_test_count),", "self.current_test_method in repeated_test_option.keys(): # otherwise re-decorate only if test name", "variables: Expects self.test_module be set to proper full name of", "common_test_param: test_param.update(common_test_param) test_plan.tests_to_execute.append(self.current_test_name) test_plan.all_tests[self.current_test_name] = test_param.copy() finally: self.current_test_method = None", "and resources self.__create_test_module_directory(remote_test_module_dir, test_module_dir) self.test_class = getattr(module, self.test_class_name)(self.config, self.ssh_pool) if", "Check requirements for applications for test_module in sorted(self.modules.keys()): module =", "'skip_msg': skip_msg, 'skip_no_start': skip_no_start, }) test_plan.skipped_tests.append(self.current_test_name) else: if common_test_param: test_param.update(common_test_param)", "teardown_fixture = getattr(test_function, \"__teardown__\") teardown_method = getattr(self.test_class, teardown_fixture) test_params['teardown_test_method'] =", "repeat_count == 1: # don't rename tests when only one", "test_class: :return: \"\"\" for class_attr in dir(test_class): if class_attr.startswith('test_'): yield", "\"License\"); # you may not use this file except in", "else: # that's a brand new decoration decorated_test = 
repeated_test(repeat_count)(original_test.__func__)", "skip_no_start = self._check_test_for_skip() test_param.update(self.collect_test0()) repeat_count = test_param.get('repeated_test_count', 1) if repeat_count", "from time import time from shutil import copyfile from os.path", "test_function = getattr(self.test_class, self.current_test_method) # first setup fixture if hasattr(test_function,", "self.ssh_pool self._save_config() def __prepare_test_vars(self, test_method_name=None, configuration=None, cfg_options=None, **kwargs): if not", "generate results for 'executed' tests for test_module in sorted(self.modules.keys()): self.__prepare_module_vars(test_module,", "'WardReport' in self.config.get('plugins', []): report_config = self.config['plugins']['WardReport'] files_receiver_url = report_config['files_url']", "get_actual_configuration(self.config, cfg_options) configuration_representation = get_configuration_representation(cfg_options, configuration) self.current_test_name = self.current_test_method +", "case, skip by --to=repeated_test.test_name=0 is_skipped = True skip_msg = 'skipped", "tb_msg = None test_status = 'pass' pad_string = self.__get_pad_string() started", "log_print(f\"{pad_string} passed {exec_time(started)}\", color='green') except (AssertionError, TidenException) as e: test_status", "exception in setup method then re-raise the exception as we", "'keep_ignite_between_tests'): kill_stalled_java(self.ssh_pool) return test_status @step('logs') def __save_logs(self): test_dir = self.config.get('rt',", "if repeat_count > 0: if repeat_count == 1: # don't", "(self.config['remote']['suite_var_dir'], self.module_short_name) self.test_class_name = get_class_from_module(self.module_short_name) # Update Tiden config self.config['rt']", "self.pm.do(\"before_test_method\", test_module=self.test_module, test_name=self.current_test_name, artifacts=self.config.get('artifacts', {})) self.result.start_testcase(self.test_class, self.current_test_name) 
self.__update_config_and_save(current_method_name=self.current_test_name) # Execute", "kill nodes if not hasattr(self.test_class, 'keep_ignite_between_tests'): kill_stalled_java(self.ssh_pool) return test_status @step('logs')", "uuid import uuid4 from traceback import format_exc from .runner import", "self._call_plugin_manager('before_test_class_%s' % fixture_name) fixture_passed = True try: if hasattr(self.test_class, fixture_name):", "xunit_path_var = kwargs.get('xunit_path') elif config.get('var_dir') and config.get('xunit_file'): xunit_path_var = join(config.get('var_dir'),", "of given test class :param test_class: :return: \"\"\" for class_attr", "create symlink to current test\", color='red') self._save_config() def _check_test_for_skip(self): attribs", "test_params['known_issue'] = known_issue # test by default runs only once,", "remote_test_module_dir = \"%s/%s\" % (self.config['remote']['suite_var_dir'], self.module_short_name) self.test_class_name = get_class_from_module(self.module_short_name) #", "= 'skipped cause test is MUTED' known_issue = None if", "self.__print_with_format('started', current_method_name=fixture_name) self.__update_config_and_save(current_method_name=fixture_name) # Execute setup or teardown method call_method(self.test_class,", "one iteration requested test_param['repeated_test_name'] = [] else: # rare case,", "'get_run_info') else None ) # Execute test teardown method self.__call_test_setup_teardown('teardown')", "test_method_name in test_method_names: self.__prepare_test_vars(test_method_name, **common_test_param) test_param = { 'test_method_name': test_method_name,", "__call_module_setup_teardown(self, fixture_name): \"\"\" Execute test module setup/teardown fixture. 
:param fixture_name:", "else False pad_string = self.__get_pad_string() log_print(\"%s started (%s from %s)\"", "= config self.ssh = ssh_pool self.__prepare_session_vars() for test_module in sorted(self.modules.keys()):", "--to=repeated_test=N, re-decorate all tests re_decorate = True repeat_count = int(repeated_test_option)", "'setup': raise e finally: self.__set_child_steps_to_parent() self._call_plugin_manager(f'after_test_method_{method_name}') def __set_child_steps_to_parent(self): exec_report: InnerReportConfig", "(pad_string, test_cnt, len(tests_to_execute)), color='yellow') for self.test_iteration in range(repeated_test_count): if test_with_iterations:", "import uuid4 from traceback import format_exc from .runner import set_configuration_options," ]
[ "self.checksum == cache_training_set_metadata.get(CHECKSUM) and cached_training_set is not None return valid,", "in self.cache_map.values(): if path_exists(fname): delete(fname) class CacheManager: def __init__(self, dataset_manager,", "data_utils from ludwig.utils.fs_utils import delete, path_exists logger = logging.getLogger(__name__) def", "stem = alphanum(key) ext = ext or self.data_format cache_fname =", "from ludwig.utils import data_utils from ludwig.utils.fs_utils import delete, path_exists logger", "class DatasetCache: def __init__(self, config, checksum, cache_map, dataset_manager): self.config =", "self.cache_map.values(): if path_exists(fname): delete(fname) class CacheManager: def __init__(self, dataset_manager, cache_dir=None):", "cached_validation_set def put(self, training_set, test_set, validation_set, training_set_metadata): logger.info(\"Writing preprocessed training", "if path_exists(self.cache_map[TEST]) else None cached_validation_set = self.cache_map[VALIDATION] if path_exists(self.cache_map[VALIDATION]) else", "dataset, config): if not isinstance(dataset, str): # TODO(travis): could try", "import data_utils from ludwig.utils.fs_utils import delete, path_exists logger = logging.getLogger(__name__)", "= self.get_cache_key(dataset, config) cache_map = { META: self.get_cache_path(dataset, key, META,", "test_set, self.config, training_set_metadata, TEST, ) if validation_set is not None:", "logger.info(\"Writing preprocessed training set cache\") training_set = self.dataset_manager.save( self.cache_map[TRAINING], training_set,", "self.get_cache_key(dataset, config) cache_map = { META: self.get_cache_path(dataset, key, META, \"json\"),", "self.config, training_set_metadata, VALIDATION, ) logger.info(\"Writing train set metadata\") data_utils.save_json(self.cache_map[META], training_set_metadata)", "the in-memory dataset, but this is tricky for Dask return", "import re import uuid from pathlib import Path from ludwig.constants", "test_set = 
self.dataset_manager.save( self.cache_map[TEST], test_set, self.config, training_set_metadata, TEST, ) if", "training_set=None, test_set=None, validation_set=None): if dataset is not None: key =", "if path_exists(self.cache_map[VALIDATION]) else None valid = self.checksum == cache_training_set_metadata.get(CHECKSUM) and", "checksum, cache_map, dataset_manager): self.config = config self.checksum = checksum self.cache_map", "self.get_cache_path(validation_set, key, VALIDATION), } return DatasetCache(config, key, cache_map, self._dataset_manager) def", "config, checksum, cache_map, dataset_manager): self.config = config self.checksum = checksum", "config) def get_cache_path(self, dataset, key, tag, ext=None): if not isinstance(dataset,", "VALIDATION: self.get_cache_path(validation_set, key, VALIDATION), } return DatasetCache(config, key, cache_map, self._dataset_manager)", "= self.cache_map[META] if not path_exists(training_set_metadata_fp): return None cache_training_set_metadata = data_utils.load_json(training_set_metadata_fp)", "os.path.dirname(input_fname) return \".\" return self._cache_dir def can_cache(self, skip_save_processed_input): return self._dataset_manager.can_cache(skip_save_processed_input)", "if input_fname is not None: return os.path.dirname(input_fname) return \".\" return", "alphanum(key) ext = ext or self.data_format cache_fname = f\"{stem}.{tag}.{ext}\" return", "key, TRAINING), TEST: self.get_cache_path(test_set, key, TEST), VALIDATION: self.get_cache_path(validation_set, key, VALIDATION),", "for Dask return str(uuid.uuid1()) return calculate_checksum(dataset, config) def get_cache_path(self, dataset,", "key, META, \"json\"), TRAINING: self.get_cache_path(training_set, key, TRAINING), TEST: self.get_cache_path(test_set, key,", "is tricky for Dask return str(uuid.uuid1()) return calculate_checksum(dataset, config) def", "Path from ludwig.constants import CHECKSUM, META, TEST, TRAINING, VALIDATION from", "cached_validation_set = 
self.cache_map[VALIDATION] if path_exists(self.cache_map[VALIDATION]) else None valid = self.checksum", "self.get_cache_key(training_set, config) cache_map = { META: self.get_cache_path(training_set, key, META, \"json\"),", "else None valid = self.checksum == cache_training_set_metadata.get(CHECKSUM) and cached_training_set is", "path_exists(self.cache_map[TRAINING]) else None cached_test_set = self.cache_map[TEST] if path_exists(self.cache_map[TEST]) else None", "calculate_checksum(dataset, config) def get_cache_path(self, dataset, key, tag, ext=None): if not", "self.cache_map[TRAINING], training_set, self.config, training_set_metadata, TRAINING, ) if test_set is not", "test_set, validation_set, training_set_metadata def delete(self): for fname in self.cache_map.values(): if", "not path_exists(training_set_metadata_fp): return None cache_training_set_metadata = data_utils.load_json(training_set_metadata_fp) cached_training_set = self.cache_map[TRAINING]", "self.dataset_manager.save( self.cache_map[TEST], test_set, self.config, training_set_metadata, TEST, ) if validation_set is", "str(uuid.uuid1()) return calculate_checksum(dataset, config) def get_cache_path(self, dataset, key, tag, ext=None):", "training_set_metadata_fp = self.cache_map[META] if not path_exists(training_set_metadata_fp): return None cache_training_set_metadata =", "path_exists(self.cache_map[VALIDATION]) else None valid = self.checksum == cache_training_set_metadata.get(CHECKSUM) and cached_training_set", "return DatasetCache(config, key, cache_map, self._dataset_manager) else: key = self.get_cache_key(training_set, config)", "is not None return valid, cache_training_set_metadata, cached_training_set, cached_test_set, cached_validation_set def", "= self.dataset_manager.save( self.cache_map[TRAINING], training_set, self.config, training_set_metadata, TRAINING, ) if test_set", "def get_dataset_cache(self, config, dataset=None, training_set=None, test_set=None, validation_set=None): if dataset is", 
"logger.info(\"Writing train set metadata\") data_utils.save_json(self.cache_map[META], training_set_metadata) return training_set, test_set, validation_set,", "if not isinstance(dataset, str): # TODO(travis): could try hashing the", "def can_cache(self, skip_save_processed_input): return self._dataset_manager.can_cache(skip_save_processed_input) @property def data_format(self): return self._dataset_manager.data_format", "if test_set is not None: logger.info(\"Writing preprocessed test set cache\")", "= { META: self.get_cache_path(dataset, key, META, \"json\"), TRAINING: self.get_cache_path(dataset, key,", "cache_fname) def get_cache_directory(self, input_fname): if self._cache_dir is None: if input_fname", "not None: key = self.get_cache_key(dataset, config) cache_map = { META:", "its alphanumeric characters.\"\"\" return re.sub(r\"\\W+\", \"\", v) class DatasetCache: def", "if self._cache_dir is None and dataset is not None: #", "\"json\"), TRAINING: self.get_cache_path(training_set, key, TRAINING), TEST: self.get_cache_path(test_set, key, TEST), VALIDATION:", "cache_dir=None): self._dataset_manager = dataset_manager self._cache_dir = cache_dir def get_dataset_cache(self, config,", "cache_map self.dataset_manager = dataset_manager def get(self): training_set_metadata_fp = self.cache_map[META] if", "= self.cache_map[VALIDATION] if path_exists(self.cache_map[VALIDATION]) else None valid = self.checksum ==", "None: if input_fname is not None: return os.path.dirname(input_fname) return \".\"", "(minus the extension) as the cache path stem = Path(dataset).stem", "if dataset is not None: key = self.get_cache_key(dataset, config) cache_map", "preprocessed test set cache\") test_set = self.dataset_manager.save( self.cache_map[TEST], test_set, self.config,", "def get_cache_path(self, dataset, key, tag, ext=None): if not isinstance(dataset, str):", "alphanumeric characters.\"\"\" return re.sub(r\"\\W+\", \"\", v) class DatasetCache: def __init__(self,", "Use the input 
dataset filename (minus the extension) as the", "dataset is not None: # Use the input dataset filename", "delete, path_exists logger = logging.getLogger(__name__) def alphanum(v): \"\"\"Filters a string", "return training_set, test_set, validation_set, training_set_metadata def delete(self): for fname in", "return \".\" return self._cache_dir def can_cache(self, skip_save_processed_input): return self._dataset_manager.can_cache(skip_save_processed_input) @property", "a string to only its alphanumeric characters.\"\"\" return re.sub(r\"\\W+\", \"\",", "str): # TODO(travis): could try hashing the in-memory dataset, but", "ludwig.constants import CHECKSUM, META, TEST, TRAINING, VALIDATION from ludwig.data.cache.util import", "= checksum self.cache_map = cache_map self.dataset_manager = dataset_manager def get(self):", "training_set, self.config, training_set_metadata, TRAINING, ) if test_set is not None:", "CHECKSUM, META, TEST, TRAINING, VALIDATION from ludwig.data.cache.util import calculate_checksum from", "key, VALIDATION), } return DatasetCache(config, key, cache_map, self._dataset_manager) def get_cache_key(self,", "the input dataset filename (minus the extension) as the cache", "cache_fname = f\"{stem}.{tag}.{ext}\" return os.path.join(self.get_cache_directory(dataset), cache_fname) def get_cache_directory(self, input_fname): if", "ludwig.data.cache.util import calculate_checksum from ludwig.utils import data_utils from ludwig.utils.fs_utils import", "cache_map = { META: self.get_cache_path(training_set, key, META, \"json\"), TRAINING: self.get_cache_path(training_set,", "META, \"json\"), TRAINING: self.get_cache_path(dataset, key, TRAINING), TEST: self.get_cache_path(dataset, key, TEST),", "def delete(self): for fname in self.cache_map.values(): if path_exists(fname): delete(fname) class", "get_cache_directory(self, input_fname): if self._cache_dir is None: if input_fname is not", "None valid = self.checksum == cache_training_set_metadata.get(CHECKSUM) and 
cached_training_set is not", "VALIDATION), } return DatasetCache(config, key, cache_map, self._dataset_manager) else: key =", "return valid, cache_training_set_metadata, cached_training_set, cached_test_set, cached_validation_set def put(self, training_set, test_set,", "delete(fname) class CacheManager: def __init__(self, dataset_manager, cache_dir=None): self._dataset_manager = dataset_manager", "if validation_set is not None: logger.info(\"Writing preprocessed validation set cache\")", "the cache path stem = alphanum(key) ext = ext or", "and cached_training_set is not None return valid, cache_training_set_metadata, cached_training_set, cached_test_set,", "tag, ext=None): if not isinstance(dataset, str): dataset = None if", "input_fname): if self._cache_dir is None: if input_fname is not None:", "different directories, we use the unique checksum # as the", "TEST, ) if validation_set is not None: logger.info(\"Writing preprocessed validation", "config self.checksum = checksum self.cache_map = cache_map self.dataset_manager = dataset_manager", "None return valid, cache_training_set_metadata, cached_training_set, cached_test_set, cached_validation_set def put(self, training_set,", "not None: logger.info(\"Writing preprocessed test set cache\") test_set = self.dataset_manager.save(", "cached_training_set, cached_test_set, cached_validation_set def put(self, training_set, test_set, validation_set, training_set_metadata): logger.info(\"Writing", "this is tricky for Dask return str(uuid.uuid1()) return calculate_checksum(dataset, config)", "isinstance(dataset, str): # TODO(travis): could try hashing the in-memory dataset,", "the cache path stem = Path(dataset).stem else: # To avoid", "= { META: self.get_cache_path(training_set, key, META, \"json\"), TRAINING: self.get_cache_path(training_set, key,", "training_set_metadata, TRAINING, ) if test_set is not None: logger.info(\"Writing preprocessed", "set metadata\") data_utils.save_json(self.cache_map[META], 
training_set_metadata) return training_set, test_set, validation_set, training_set_metadata def", "TRAINING), TEST: self.get_cache_path(test_set, key, TEST), VALIDATION: self.get_cache_path(validation_set, key, VALIDATION), }", "not isinstance(dataset, str): # TODO(travis): could try hashing the in-memory", "= data_utils.load_json(training_set_metadata_fp) cached_training_set = self.cache_map[TRAINING] if path_exists(self.cache_map[TRAINING]) else None cached_test_set", "None: key = self.get_cache_key(dataset, config) cache_map = { META: self.get_cache_path(dataset,", "self.data_format cache_fname = f\"{stem}.{tag}.{ext}\" return os.path.join(self.get_cache_directory(dataset), cache_fname) def get_cache_directory(self, input_fname):", "string to only its alphanumeric characters.\"\"\" return re.sub(r\"\\W+\", \"\", v)", "TEST: self.get_cache_path(test_set, key, TEST), VALIDATION: self.get_cache_path(validation_set, key, VALIDATION), } return", "self._cache_dir is None and dataset is not None: # Use", "None cached_test_set = self.cache_map[TEST] if path_exists(self.cache_map[TEST]) else None cached_validation_set =", "self.get_cache_path(training_set, key, META, \"json\"), TRAINING: self.get_cache_path(training_set, key, TRAINING), TEST: self.get_cache_path(test_set,", "get_cache_key(self, dataset, config): if not isinstance(dataset, str): # TODO(travis): could", "from ludwig.utils.fs_utils import delete, path_exists logger = logging.getLogger(__name__) def alphanum(v):", "self.cache_map[META] if not path_exists(training_set_metadata_fp): return None cache_training_set_metadata = data_utils.load_json(training_set_metadata_fp) cached_training_set", "self.config, training_set_metadata, TRAINING, ) if test_set is not None: logger.info(\"Writing", "cache\") test_set = self.dataset_manager.save( self.cache_map[TEST], test_set, self.config, training_set_metadata, TEST, )", "dataset is not None: key = self.get_cache_key(dataset, config) cache_map =", "only its alphanumeric 
characters.\"\"\" return re.sub(r\"\\W+\", \"\", v) class DatasetCache:", "dataset_manager, cache_dir=None): self._dataset_manager = dataset_manager self._cache_dir = cache_dir def get_dataset_cache(self,", "= logging.getLogger(__name__) def alphanum(v): \"\"\"Filters a string to only its", "TRAINING), TEST: self.get_cache_path(dataset, key, TEST), VALIDATION: self.get_cache_path(dataset, key, VALIDATION), }", "self.checksum = checksum self.cache_map = cache_map self.dataset_manager = dataset_manager def", "path_exists(fname): delete(fname) class CacheManager: def __init__(self, dataset_manager, cache_dir=None): self._dataset_manager =", "test_set, validation_set, training_set_metadata): logger.info(\"Writing preprocessed training set cache\") training_set =", "is None and dataset is not None: # Use the", "self.cache_map[VALIDATION], validation_set, self.config, training_set_metadata, VALIDATION, ) logger.info(\"Writing train set metadata\")", "training_set_metadata) return training_set, test_set, validation_set, training_set_metadata def delete(self): for fname", "metadata\") data_utils.save_json(self.cache_map[META], training_set_metadata) return training_set, test_set, validation_set, training_set_metadata def delete(self):", "key = self.get_cache_key(dataset, config) cache_map = { META: self.get_cache_path(dataset, key,", "return str(uuid.uuid1()) return calculate_checksum(dataset, config) def get_cache_path(self, dataset, key, tag,", "META, TEST, TRAINING, VALIDATION from ludwig.data.cache.util import calculate_checksum from ludwig.utils", "return re.sub(r\"\\W+\", \"\", v) class DatasetCache: def __init__(self, config, checksum,", "= self.dataset_manager.save( self.cache_map[TEST], test_set, self.config, training_set_metadata, TEST, ) if validation_set", "validation_set, training_set_metadata def delete(self): for fname in self.cache_map.values(): if path_exists(fname):", "} return DatasetCache(config, key, cache_map, self._dataset_manager) else: key = 
self.get_cache_key(training_set,", ") logger.info(\"Writing train set metadata\") data_utils.save_json(self.cache_map[META], training_set_metadata) return training_set, test_set,", "cache_training_set_metadata.get(CHECKSUM) and cached_training_set is not None return valid, cache_training_set_metadata, cached_training_set,", "logging import os import re import uuid from pathlib import", "TRAINING, VALIDATION from ludwig.data.cache.util import calculate_checksum from ludwig.utils import data_utils", "str): dataset = None if self._cache_dir is None and dataset", "self.cache_map[TRAINING] if path_exists(self.cache_map[TRAINING]) else None cached_test_set = self.cache_map[TEST] if path_exists(self.cache_map[TEST])", "self.cache_map[TEST] if path_exists(self.cache_map[TEST]) else None cached_validation_set = self.cache_map[VALIDATION] if path_exists(self.cache_map[VALIDATION])", "self.dataset_manager.save( self.cache_map[VALIDATION], validation_set, self.config, training_set_metadata, VALIDATION, ) logger.info(\"Writing train set", "valid, cache_training_set_metadata, cached_training_set, cached_test_set, cached_validation_set def put(self, training_set, test_set, validation_set,", "return os.path.dirname(input_fname) return \".\" return self._cache_dir def can_cache(self, skip_save_processed_input): return", "= None if self._cache_dir is None and dataset is not", "return calculate_checksum(dataset, config) def get_cache_path(self, dataset, key, tag, ext=None): if", "avoid collisions across different directories, we use the unique checksum", "= ext or self.data_format cache_fname = f\"{stem}.{tag}.{ext}\" return os.path.join(self.get_cache_directory(dataset), cache_fname)", "set cache\") training_set = self.dataset_manager.save( self.cache_map[TRAINING], training_set, self.config, training_set_metadata, TRAINING,", "f\"{stem}.{tag}.{ext}\" return os.path.join(self.get_cache_directory(dataset), cache_fname) def get_cache_directory(self, input_fname): if self._cache_dir 
is", "__init__(self, dataset_manager, cache_dir=None): self._dataset_manager = dataset_manager self._cache_dir = cache_dir def", "def get(self): training_set_metadata_fp = self.cache_map[META] if not path_exists(training_set_metadata_fp): return None", "key = self.get_cache_key(training_set, config) cache_map = { META: self.get_cache_path(training_set, key,", "DatasetCache(config, key, cache_map, self._dataset_manager) else: key = self.get_cache_key(training_set, config) cache_map", "else None cached_validation_set = self.cache_map[VALIDATION] if path_exists(self.cache_map[VALIDATION]) else None valid", "input_fname is not None: return os.path.dirname(input_fname) return \".\" return self._cache_dir", "os import re import uuid from pathlib import Path from", "ext or self.data_format cache_fname = f\"{stem}.{tag}.{ext}\" return os.path.join(self.get_cache_directory(dataset), cache_fname) def", "import delete, path_exists logger = logging.getLogger(__name__) def alphanum(v): \"\"\"Filters a", "ludwig.utils import data_utils from ludwig.utils.fs_utils import delete, path_exists logger =", "get_cache_path(self, dataset, key, tag, ext=None): if not isinstance(dataset, str): dataset", "validation_set=None): if dataset is not None: key = self.get_cache_key(dataset, config)", "None and dataset is not None: # Use the input", "return self._cache_dir def can_cache(self, skip_save_processed_input): return self._dataset_manager.can_cache(skip_save_processed_input) @property def data_format(self):", "self._dataset_manager) else: key = self.get_cache_key(training_set, config) cache_map = { META:", "cache_map, self._dataset_manager) def get_cache_key(self, dataset, config): if not isinstance(dataset, str):", "self.get_cache_path(dataset, key, TEST), VALIDATION: self.get_cache_path(dataset, key, VALIDATION), } return DatasetCache(config,", "calculate_checksum from ludwig.utils import data_utils from ludwig.utils.fs_utils import delete, path_exists", "None: logger.info(\"Writing 
preprocessed test set cache\") test_set = self.dataset_manager.save( self.cache_map[TEST],", "None if self._cache_dir is None and dataset is not None:", "self._cache_dir is None: if input_fname is not None: return os.path.dirname(input_fname)", "self._dataset_manager = dataset_manager self._cache_dir = cache_dir def get_dataset_cache(self, config, dataset=None,", "TEST: self.get_cache_path(dataset, key, TEST), VALIDATION: self.get_cache_path(dataset, key, VALIDATION), } return", "path stem = alphanum(key) ext = ext or self.data_format cache_fname", "self._cache_dir = cache_dir def get_dataset_cache(self, config, dataset=None, training_set=None, test_set=None, validation_set=None):", "# To avoid collisions across different directories, we use the", "v) class DatasetCache: def __init__(self, config, checksum, cache_map, dataset_manager): self.config", "== cache_training_set_metadata.get(CHECKSUM) and cached_training_set is not None return valid, cache_training_set_metadata,", "= self.get_cache_key(training_set, config) cache_map = { META: self.get_cache_path(training_set, key, META,", "= self.cache_map[TRAINING] if path_exists(self.cache_map[TRAINING]) else None cached_test_set = self.cache_map[TEST] if", "self.config = config self.checksum = checksum self.cache_map = cache_map self.dataset_manager", "= cache_dir def get_dataset_cache(self, config, dataset=None, training_set=None, test_set=None, validation_set=None): if", "= self.checksum == cache_training_set_metadata.get(CHECKSUM) and cached_training_set is not None return", "training_set, test_set, validation_set, training_set_metadata): logger.info(\"Writing preprocessed training set cache\") training_set", "path_exists logger = logging.getLogger(__name__) def alphanum(v): \"\"\"Filters a string to", "use the unique checksum # as the cache path stem", "validation_set is not None: logger.info(\"Writing preprocessed validation set cache\") validation_set", "TEST), VALIDATION: self.get_cache_path(validation_set, 
key, VALIDATION), } return DatasetCache(config, key, cache_map,", "TRAINING, ) if test_set is not None: logger.info(\"Writing preprocessed test", "VALIDATION: self.get_cache_path(dataset, key, VALIDATION), } return DatasetCache(config, key, cache_map, self._dataset_manager)", "training_set_metadata def delete(self): for fname in self.cache_map.values(): if path_exists(fname): delete(fname)", "cache_map, self._dataset_manager) else: key = self.get_cache_key(training_set, config) cache_map = {", "META, \"json\"), TRAINING: self.get_cache_path(training_set, key, TRAINING), TEST: self.get_cache_path(test_set, key, TEST),", "META: self.get_cache_path(dataset, key, META, \"json\"), TRAINING: self.get_cache_path(dataset, key, TRAINING), TEST:", "else: key = self.get_cache_key(training_set, config) cache_map = { META: self.get_cache_path(training_set,", "def alphanum(v): \"\"\"Filters a string to only its alphanumeric characters.\"\"\"", "re import uuid from pathlib import Path from ludwig.constants import", "dataset_manager def get(self): training_set_metadata_fp = self.cache_map[META] if not path_exists(training_set_metadata_fp): return", "dataset, but this is tricky for Dask return str(uuid.uuid1()) return", "fname in self.cache_map.values(): if path_exists(fname): delete(fname) class CacheManager: def __init__(self,", "self.dataset_manager.save( self.cache_map[TRAINING], training_set, self.config, training_set_metadata, TRAINING, ) if test_set is", "preprocessed validation set cache\") validation_set = self.dataset_manager.save( self.cache_map[VALIDATION], validation_set, self.config,", "uuid from pathlib import Path from ludwig.constants import CHECKSUM, META,", "re.sub(r\"\\W+\", \"\", v) class DatasetCache: def __init__(self, config, checksum, cache_map,", "if self._cache_dir is None: if input_fname is not None: return", "to only its alphanumeric characters.\"\"\" return re.sub(r\"\\W+\", \"\", v) class", "the extension) as the cache path stem = Path(dataset).stem 
else:", "TRAINING: self.get_cache_path(dataset, key, TRAINING), TEST: self.get_cache_path(dataset, key, TEST), VALIDATION: self.get_cache_path(dataset,", "else: # To avoid collisions across different directories, we use", "is not None: # Use the input dataset filename (minus", "or self.data_format cache_fname = f\"{stem}.{tag}.{ext}\" return os.path.join(self.get_cache_directory(dataset), cache_fname) def get_cache_directory(self,", "logging.getLogger(__name__) def alphanum(v): \"\"\"Filters a string to only its alphanumeric", "dataset_manager self._cache_dir = cache_dir def get_dataset_cache(self, config, dataset=None, training_set=None, test_set=None,", "filename (minus the extension) as the cache path stem =", "\"json\"), TRAINING: self.get_cache_path(dataset, key, TRAINING), TEST: self.get_cache_path(dataset, key, TEST), VALIDATION:", "TODO(travis): could try hashing the in-memory dataset, but this is", "cache_map = { META: self.get_cache_path(dataset, key, META, \"json\"), TRAINING: self.get_cache_path(dataset,", "extension) as the cache path stem = Path(dataset).stem else: #", "tricky for Dask return str(uuid.uuid1()) return calculate_checksum(dataset, config) def get_cache_path(self,", "collisions across different directories, we use the unique checksum #", "cache_training_set_metadata, cached_training_set, cached_test_set, cached_validation_set def put(self, training_set, test_set, validation_set, training_set_metadata):", "# as the cache path stem = alphanum(key) ext =", "DatasetCache: def __init__(self, config, checksum, cache_map, dataset_manager): self.config = config", "To avoid collisions across different directories, we use the unique", "validation_set = self.dataset_manager.save( self.cache_map[VALIDATION], validation_set, self.config, training_set_metadata, VALIDATION, ) logger.info(\"Writing", "def put(self, training_set, test_set, validation_set, training_set_metadata): logger.info(\"Writing preprocessed training set", "get(self): 
training_set_metadata_fp = self.cache_map[META] if not path_exists(training_set_metadata_fp): return None cache_training_set_metadata", "checksum self.cache_map = cache_map self.dataset_manager = dataset_manager def get(self): training_set_metadata_fp", "self._cache_dir def can_cache(self, skip_save_processed_input): return self._dataset_manager.can_cache(skip_save_processed_input) @property def data_format(self): return", "but this is tricky for Dask return str(uuid.uuid1()) return calculate_checksum(dataset,", "cached_test_set = self.cache_map[TEST] if path_exists(self.cache_map[TEST]) else None cached_validation_set = self.cache_map[VALIDATION]", "directories, we use the unique checksum # as the cache", "import uuid from pathlib import Path from ludwig.constants import CHECKSUM,", "TEST, TRAINING, VALIDATION from ludwig.data.cache.util import calculate_checksum from ludwig.utils import", "cache_dir def get_dataset_cache(self, config, dataset=None, training_set=None, test_set=None, validation_set=None): if dataset", "import calculate_checksum from ludwig.utils import data_utils from ludwig.utils.fs_utils import delete,", "= f\"{stem}.{tag}.{ext}\" return os.path.join(self.get_cache_directory(dataset), cache_fname) def get_cache_directory(self, input_fname): if self._cache_dir", "could try hashing the in-memory dataset, but this is tricky", "# TODO(travis): could try hashing the in-memory dataset, but this", "None: # Use the input dataset filename (minus the extension)", "def __init__(self, config, checksum, cache_map, dataset_manager): self.config = config self.checksum", "not None: # Use the input dataset filename (minus the", "dataset_manager): self.config = config self.checksum = checksum self.cache_map = cache_map", "training_set, test_set, validation_set, training_set_metadata def delete(self): for fname in self.cache_map.values():", "cache\") training_set = self.dataset_manager.save( self.cache_map[TRAINING], training_set, self.config, 
training_set_metadata, TRAINING, )", "logger.info(\"Writing preprocessed validation set cache\") validation_set = self.dataset_manager.save( self.cache_map[VALIDATION], validation_set,", "config) cache_map = { META: self.get_cache_path(training_set, key, META, \"json\"), TRAINING:", "self.dataset_manager = dataset_manager def get(self): training_set_metadata_fp = self.cache_map[META] if not", "key, tag, ext=None): if not isinstance(dataset, str): dataset = None", "stem = Path(dataset).stem else: # To avoid collisions across different", "} return DatasetCache(config, key, cache_map, self._dataset_manager) def get_cache_key(self, dataset, config):", "return DatasetCache(config, key, cache_map, self._dataset_manager) def get_cache_key(self, dataset, config): if", "cached_test_set, cached_validation_set def put(self, training_set, test_set, validation_set, training_set_metadata): logger.info(\"Writing preprocessed", "unique checksum # as the cache path stem = alphanum(key)", "key, TRAINING), TEST: self.get_cache_path(dataset, key, TEST), VALIDATION: self.get_cache_path(dataset, key, VALIDATION),", "config): if not isinstance(dataset, str): # TODO(travis): could try hashing", "cache path stem = alphanum(key) ext = ext or self.data_format", "data_utils.load_json(training_set_metadata_fp) cached_training_set = self.cache_map[TRAINING] if path_exists(self.cache_map[TRAINING]) else None cached_test_set =", "import CHECKSUM, META, TEST, TRAINING, VALIDATION from ludwig.data.cache.util import calculate_checksum", "os.path.join(self.get_cache_directory(dataset), cache_fname) def get_cache_directory(self, input_fname): if self._cache_dir is None: if", "VALIDATION, ) logger.info(\"Writing train set metadata\") data_utils.save_json(self.cache_map[META], training_set_metadata) return training_set,", "if path_exists(self.cache_map[TRAINING]) else None cached_test_set = self.cache_map[TEST] if path_exists(self.cache_map[TEST]) else", "import logging import os import re import uuid 
from pathlib", "else None cached_test_set = self.cache_map[TEST] if path_exists(self.cache_map[TEST]) else None cached_validation_set", "checksum # as the cache path stem = alphanum(key) ext", "cached_training_set is not None return valid, cache_training_set_metadata, cached_training_set, cached_test_set, cached_validation_set", "self.cache_map[TEST], test_set, self.config, training_set_metadata, TEST, ) if validation_set is not", "if path_exists(fname): delete(fname) class CacheManager: def __init__(self, dataset_manager, cache_dir=None): self._dataset_manager", "# Use the input dataset filename (minus the extension) as", "= alphanum(key) ext = ext or self.data_format cache_fname = f\"{stem}.{tag}.{ext}\"", "None: return os.path.dirname(input_fname) return \".\" return self._cache_dir def can_cache(self, skip_save_processed_input):", "self.config, training_set_metadata, TEST, ) if validation_set is not None: logger.info(\"Writing", "config) cache_map = { META: self.get_cache_path(dataset, key, META, \"json\"), TRAINING:", "path_exists(training_set_metadata_fp): return None cache_training_set_metadata = data_utils.load_json(training_set_metadata_fp) cached_training_set = self.cache_map[TRAINING] if", "__init__(self, config, checksum, cache_map, dataset_manager): self.config = config self.checksum =", "get_dataset_cache(self, config, dataset=None, training_set=None, test_set=None, validation_set=None): if dataset is not", "is not None: return os.path.dirname(input_fname) return \".\" return self._cache_dir def", "self._dataset_manager) def get_cache_key(self, dataset, config): if not isinstance(dataset, str): #", "path stem = Path(dataset).stem else: # To avoid collisions across", "class CacheManager: def __init__(self, dataset_manager, cache_dir=None): self._dataset_manager = dataset_manager self._cache_dir", "cache\") validation_set = self.dataset_manager.save( self.cache_map[VALIDATION], validation_set, self.config, training_set_metadata, VALIDATION, )", 
"training_set_metadata): logger.info(\"Writing preprocessed training set cache\") training_set = self.dataset_manager.save( self.cache_map[TRAINING],", ") if test_set is not None: logger.info(\"Writing preprocessed test set", "test_set=None, validation_set=None): if dataset is not None: key = self.get_cache_key(dataset,", "path_exists(self.cache_map[TEST]) else None cached_validation_set = self.cache_map[VALIDATION] if path_exists(self.cache_map[VALIDATION]) else None", "\".\" return self._cache_dir def can_cache(self, skip_save_processed_input): return self._dataset_manager.can_cache(skip_save_processed_input) @property def", "META: self.get_cache_path(training_set, key, META, \"json\"), TRAINING: self.get_cache_path(training_set, key, TRAINING), TEST:", "valid = self.checksum == cache_training_set_metadata.get(CHECKSUM) and cached_training_set is not None", "for fname in self.cache_map.values(): if path_exists(fname): delete(fname) class CacheManager: def", "not None: logger.info(\"Writing preprocessed validation set cache\") validation_set = self.dataset_manager.save(", "data_utils.save_json(self.cache_map[META], training_set_metadata) return training_set, test_set, validation_set, training_set_metadata def delete(self): for", "as the cache path stem = Path(dataset).stem else: # To", "ext = ext or self.data_format cache_fname = f\"{stem}.{tag}.{ext}\" return os.path.join(self.get_cache_directory(dataset),", "key, cache_map, self._dataset_manager) else: key = self.get_cache_key(training_set, config) cache_map =", "key, VALIDATION), } return DatasetCache(config, key, cache_map, self._dataset_manager) else: key", "= dataset_manager def get(self): training_set_metadata_fp = self.cache_map[META] if not path_exists(training_set_metadata_fp):", "self.get_cache_path(training_set, key, TRAINING), TEST: self.get_cache_path(test_set, key, TEST), VALIDATION: self.get_cache_path(validation_set, key,", "{ META: self.get_cache_path(dataset, key, META, \"json\"), TRAINING: 
self.get_cache_path(dataset, key, TRAINING),", "key, cache_map, self._dataset_manager) def get_cache_key(self, dataset, config): if not isinstance(dataset,", "and dataset is not None: # Use the input dataset", "train set metadata\") data_utils.save_json(self.cache_map[META], training_set_metadata) return training_set, test_set, validation_set, training_set_metadata", "CacheManager: def __init__(self, dataset_manager, cache_dir=None): self._dataset_manager = dataset_manager self._cache_dir =", "in-memory dataset, but this is tricky for Dask return str(uuid.uuid1())", "= config self.checksum = checksum self.cache_map = cache_map self.dataset_manager =", "VALIDATION), } return DatasetCache(config, key, cache_map, self._dataset_manager) def get_cache_key(self, dataset,", "test_set is not None: logger.info(\"Writing preprocessed test set cache\") test_set", "key, TEST), VALIDATION: self.get_cache_path(dataset, key, VALIDATION), } return DatasetCache(config, key,", "self.cache_map = cache_map self.dataset_manager = dataset_manager def get(self): training_set_metadata_fp =", "try hashing the in-memory dataset, but this is tricky for", "dataset filename (minus the extension) as the cache path stem", "= self.cache_map[TEST] if path_exists(self.cache_map[TEST]) else None cached_validation_set = self.cache_map[VALIDATION] if", "TRAINING: self.get_cache_path(training_set, key, TRAINING), TEST: self.get_cache_path(test_set, key, TEST), VALIDATION: self.get_cache_path(validation_set,", "logger = logging.getLogger(__name__) def alphanum(v): \"\"\"Filters a string to only", "cached_training_set = self.cache_map[TRAINING] if path_exists(self.cache_map[TRAINING]) else None cached_test_set = self.cache_map[TEST]", "the unique checksum # as the cache path stem =", "VALIDATION from ludwig.data.cache.util import calculate_checksum from ludwig.utils import data_utils from", "\"\"\"Filters a string to only its alphanumeric characters.\"\"\" return re.sub(r\"\\W+\",", "= dataset_manager 
self._cache_dir = cache_dir def get_dataset_cache(self, config, dataset=None, training_set=None,", "cache_training_set_metadata = data_utils.load_json(training_set_metadata_fp) cached_training_set = self.cache_map[TRAINING] if path_exists(self.cache_map[TRAINING]) else None", "put(self, training_set, test_set, validation_set, training_set_metadata): logger.info(\"Writing preprocessed training set cache\")", ") if validation_set is not None: logger.info(\"Writing preprocessed validation set", "import os import re import uuid from pathlib import Path", "pathlib import Path from ludwig.constants import CHECKSUM, META, TEST, TRAINING,", "\"\", v) class DatasetCache: def __init__(self, config, checksum, cache_map, dataset_manager):", "hashing the in-memory dataset, but this is tricky for Dask", "self.get_cache_path(test_set, key, TEST), VALIDATION: self.get_cache_path(validation_set, key, VALIDATION), } return DatasetCache(config,", "characters.\"\"\" return re.sub(r\"\\W+\", \"\", v) class DatasetCache: def __init__(self, config,", "self.get_cache_path(dataset, key, META, \"json\"), TRAINING: self.get_cache_path(dataset, key, TRAINING), TEST: self.get_cache_path(dataset,", "import Path from ludwig.constants import CHECKSUM, META, TEST, TRAINING, VALIDATION", "= cache_map self.dataset_manager = dataset_manager def get(self): training_set_metadata_fp = self.cache_map[META]", "self.get_cache_path(dataset, key, TRAINING), TEST: self.get_cache_path(dataset, key, TEST), VALIDATION: self.get_cache_path(dataset, key,", "= self.dataset_manager.save( self.cache_map[VALIDATION], validation_set, self.config, training_set_metadata, VALIDATION, ) logger.info(\"Writing train", "dataset=None, training_set=None, test_set=None, validation_set=None): if dataset is not None: key", "from ludwig.data.cache.util import calculate_checksum from ludwig.utils import data_utils from ludwig.utils.fs_utils", "training_set = self.dataset_manager.save( self.cache_map[TRAINING], training_set, 
self.config, training_set_metadata, TRAINING, ) if", "test set cache\") test_set = self.dataset_manager.save( self.cache_map[TEST], test_set, self.config, training_set_metadata,", "is not None: logger.info(\"Writing preprocessed validation set cache\") validation_set =", "if not path_exists(training_set_metadata_fp): return None cache_training_set_metadata = data_utils.load_json(training_set_metadata_fp) cached_training_set =", "across different directories, we use the unique checksum # as", "TEST), VALIDATION: self.get_cache_path(dataset, key, VALIDATION), } return DatasetCache(config, key, cache_map,", "ext=None): if not isinstance(dataset, str): dataset = None if self._cache_dir", "from pathlib import Path from ludwig.constants import CHECKSUM, META, TEST,", "Path(dataset).stem else: # To avoid collisions across different directories, we", "cache_map, dataset_manager): self.config = config self.checksum = checksum self.cache_map =", "Dask return str(uuid.uuid1()) return calculate_checksum(dataset, config) def get_cache_path(self, dataset, key,", "DatasetCache(config, key, cache_map, self._dataset_manager) def get_cache_key(self, dataset, config): if not", "is None: if input_fname is not None: return os.path.dirname(input_fname) return", "set cache\") test_set = self.dataset_manager.save( self.cache_map[TEST], test_set, self.config, training_set_metadata, TEST,", "not None: return os.path.dirname(input_fname) return \".\" return self._cache_dir def can_cache(self,", "as the cache path stem = alphanum(key) ext = ext", "training set cache\") training_set = self.dataset_manager.save( self.cache_map[TRAINING], training_set, self.config, training_set_metadata,", "we use the unique checksum # as the cache path", "def __init__(self, dataset_manager, cache_dir=None): self._dataset_manager = dataset_manager self._cache_dir = cache_dir", "isinstance(dataset, str): dataset = None if self._cache_dir is None and", "validation set cache\") validation_set = 
self.dataset_manager.save( self.cache_map[VALIDATION], validation_set, self.config, training_set_metadata,", "None cached_validation_set = self.cache_map[VALIDATION] if path_exists(self.cache_map[VALIDATION]) else None valid =", "{ META: self.get_cache_path(training_set, key, META, \"json\"), TRAINING: self.get_cache_path(training_set, key, TRAINING),", "training_set_metadata, VALIDATION, ) logger.info(\"Writing train set metadata\") data_utils.save_json(self.cache_map[META], training_set_metadata) return", "not isinstance(dataset, str): dataset = None if self._cache_dir is None", "self.get_cache_path(dataset, key, VALIDATION), } return DatasetCache(config, key, cache_map, self._dataset_manager) else:", "None cache_training_set_metadata = data_utils.load_json(training_set_metadata_fp) cached_training_set = self.cache_map[TRAINING] if path_exists(self.cache_map[TRAINING]) else", "is not None: logger.info(\"Writing preprocessed test set cache\") test_set =", "self.cache_map[VALIDATION] if path_exists(self.cache_map[VALIDATION]) else None valid = self.checksum == cache_training_set_metadata.get(CHECKSUM)", "def get_cache_directory(self, input_fname): if self._cache_dir is None: if input_fname is", "validation_set, training_set_metadata): logger.info(\"Writing preprocessed training set cache\") training_set = self.dataset_manager.save(", "ludwig.utils.fs_utils import delete, path_exists logger = logging.getLogger(__name__) def alphanum(v): \"\"\"Filters", "from ludwig.constants import CHECKSUM, META, TEST, TRAINING, VALIDATION from ludwig.data.cache.util", "key, TEST), VALIDATION: self.get_cache_path(validation_set, key, VALIDATION), } return DatasetCache(config, key,", "input dataset filename (minus the extension) as the cache path", "return None cache_training_set_metadata = data_utils.load_json(training_set_metadata_fp) cached_training_set = self.cache_map[TRAINING] if path_exists(self.cache_map[TRAINING])", "validation_set, self.config, training_set_metadata, 
VALIDATION, ) logger.info(\"Writing train set metadata\") data_utils.save_json(self.cache_map[META],", "return os.path.join(self.get_cache_directory(dataset), cache_fname) def get_cache_directory(self, input_fname): if self._cache_dir is None:", "def get_cache_key(self, dataset, config): if not isinstance(dataset, str): # TODO(travis):", "training_set_metadata, TEST, ) if validation_set is not None: logger.info(\"Writing preprocessed", "set cache\") validation_set = self.dataset_manager.save( self.cache_map[VALIDATION], validation_set, self.config, training_set_metadata, VALIDATION,", "None: logger.info(\"Writing preprocessed validation set cache\") validation_set = self.dataset_manager.save( self.cache_map[VALIDATION],", "dataset = None if self._cache_dir is None and dataset is", "cache path stem = Path(dataset).stem else: # To avoid collisions", "is not None: key = self.get_cache_key(dataset, config) cache_map = {", "not None return valid, cache_training_set_metadata, cached_training_set, cached_test_set, cached_validation_set def put(self,", "delete(self): for fname in self.cache_map.values(): if path_exists(fname): delete(fname) class CacheManager:", "preprocessed training set cache\") training_set = self.dataset_manager.save( self.cache_map[TRAINING], training_set, self.config,", "logger.info(\"Writing preprocessed test set cache\") test_set = self.dataset_manager.save( self.cache_map[TEST], test_set,", "alphanum(v): \"\"\"Filters a string to only its alphanumeric characters.\"\"\" return", "if not isinstance(dataset, str): dataset = None if self._cache_dir is", "= Path(dataset).stem else: # To avoid collisions across different directories,", "config, dataset=None, training_set=None, test_set=None, validation_set=None): if dataset is not None:", "key, META, \"json\"), TRAINING: self.get_cache_path(dataset, key, TRAINING), TEST: self.get_cache_path(dataset, key,", "dataset, key, tag, ext=None): if not isinstance(dataset, str): dataset =" ]
[ "'change_machine_failed': { } } for recipe in sorted_recipe_list: for mining_research_modifier", "CalculatorBase, failed_dict: dict): recipe_list = list(test_obj.block_obj_dict['recipe'].keys()) for recipe in recipe_list:", "1) def test_calculator_base(failed_dict): mrms = [0, 0.3] pm = [None,", "{ } } for recipe in sorted_recipe_list: for mining_research_modifier in", "test_obj = CalculatorBase(recipe_name=recipe, amount=amount, preferred_machine_list=preferred_machines, use_kovarex=use_kovarex, mining_research_modifier=mining_research_modifier) except: dict_add_number(failed_dict['init_failed'], key=recipe,", "test_calculator_base_methods(test_obj: CalculatorBase, failed_dict: dict): recipe_list = list(test_obj.block_obj_dict['recipe'].keys()) for recipe in", "from FactorioCalcBase.calculator_base import CalculatorBase from FactorioCalcBase.dependency_dict_common_function import dict_add_number import time", "from FactorioCalcBase.recipe import Recipe from FactorioCalcBase.calculator_base import CalculatorBase from FactorioCalcBase.dependency_dict_common_function", "= [1, 101.5] failed_dict['init_failed'] = {} failed_dict['method_failed'] = { 'change_machine_failed':", "except: dict_add_number(failed_dict['method_failed']['change_machine_failed'], recipe, 1) def test_calculator_base(failed_dict): mrms = [0, 0.3]", "in sorted_recipe_list: for mining_research_modifier in mrms: for preferred_machines in pm:", "\"stone-furnace\", \"burner-mining-drill\"]] uk = [True, False] am = [1, 101.5]", "production_machine_category_list_dict.get(cat) failed_dict['method_failed']['change_machine_failed'] = {} if len(available_machine_list) > 1: for machine", "failed_dict['method_failed']['change_machine_failed'] = {} if len(available_machine_list) > 1: for machine in", "if test_obj.block_obj_dict['recipe']['machine_name'] != machine: raise 'MachineNotChanged' def test_calculator_base_methods(test_obj: CalculatorBase, failed_dict:", "pm: for use_kovarex in uk: for amount in am: 
try:", "[0, 0.3] pm = [None, [\"assembling-machine-2\", \"stone-furnace\", \"burner-mining-drill\"]] uk =", "0.3] pm = [None, [\"assembling-machine-2\", \"stone-furnace\", \"burner-mining-drill\"]] uk = [True,", "import time def test_change_machine(test_obj: CalculatorBase, target_recipe, failed_dict): recipe_obj = Recipe(recipe_name=target_recipe)", "1: for machine in available_machine_list: test_obj.change_machine_to_specific_block(recipe_name=target_recipe, machine_name=machine) if test_obj.block_obj_dict['recipe']['machine_name'] !=", "failed_dict['init_failed'] = {} failed_dict['method_failed'] = { 'change_machine_failed': { } }", "> 1: for machine in available_machine_list: test_obj.change_machine_to_specific_block(recipe_name=target_recipe, machine_name=machine) if test_obj.block_obj_dict['recipe']['machine_name']", "time def test_change_machine(test_obj: CalculatorBase, target_recipe, failed_dict): recipe_obj = Recipe(recipe_name=target_recipe) cat", "target_recipe, failed_dict): recipe_obj = Recipe(recipe_name=target_recipe) cat = recipe_obj.get_category() available_machine_list =", "def test_calculator_base(failed_dict): mrms = [0, 0.3] pm = [None, [\"assembling-machine-2\",", "failed_dict) pprint.pp(failed_dict) return failed_dict def run_test(): start_time = time.time() test_calculator_base({})", "failed_dict: dict): recipe_list = list(test_obj.block_obj_dict['recipe'].keys()) for recipe in recipe_list: try:", "failed_dict def run_test(): start_time = time.time() test_calculator_base({}) print(f'finished in {time.time()-start_time}')", "failed_dict): recipe_obj = Recipe(recipe_name=target_recipe) cat = recipe_obj.get_category() available_machine_list = production_machine_category_list_dict.get(cat)", "[\"assembling-machine-2\", \"stone-furnace\", \"burner-mining-drill\"]] uk = [True, False] am = [1,", "mrms: for preferred_machines in pm: for use_kovarex in uk: for", "import pprint from FactorioCalcBase.data.binary import sorted_recipe_list, 
production_machine_category_list_dict from FactorioCalcBase.recipe import", "recipe_obj.get_category() available_machine_list = production_machine_category_list_dict.get(cat) failed_dict['method_failed']['change_machine_failed'] = {} if len(available_machine_list) >", "= {} if len(available_machine_list) > 1: for machine in available_machine_list:", "failed_dict) except: dict_add_number(failed_dict['method_failed']['change_machine_failed'], recipe, 1) def test_calculator_base(failed_dict): mrms = [0,", "recipe_obj = Recipe(recipe_name=target_recipe) cat = recipe_obj.get_category() available_machine_list = production_machine_category_list_dict.get(cat) failed_dict['method_failed']['change_machine_failed']", "sorted_recipe_list: for mining_research_modifier in mrms: for preferred_machines in pm: for", "pprint from FactorioCalcBase.data.binary import sorted_recipe_list, production_machine_category_list_dict from FactorioCalcBase.recipe import Recipe", "uk = [True, False] am = [1, 101.5] failed_dict['init_failed'] =", "dict_add_number(failed_dict['method_failed']['change_machine_failed'], recipe, 1) def test_calculator_base(failed_dict): mrms = [0, 0.3] pm", "amount in am: try: test_obj = CalculatorBase(recipe_name=recipe, amount=amount, preferred_machine_list=preferred_machines, use_kovarex=use_kovarex,", "= { 'change_machine_failed': { } } for recipe in sorted_recipe_list:", "for preferred_machines in pm: for use_kovarex in uk: for amount", "test_calculator_base_methods(test_obj, failed_dict) pprint.pp(failed_dict) return failed_dict def run_test(): start_time = time.time()", "val=1) test_calculator_base_methods(test_obj, failed_dict) pprint.pp(failed_dict) return failed_dict def run_test(): start_time =", "if len(available_machine_list) > 1: for machine in available_machine_list: test_obj.change_machine_to_specific_block(recipe_name=target_recipe, machine_name=machine)", "production_machine_category_list_dict from FactorioCalcBase.recipe import Recipe from 
FactorioCalcBase.calculator_base import CalculatorBase from", "use_kovarex in uk: for amount in am: try: test_obj =", "pprint.pp(failed_dict) return failed_dict def run_test(): start_time = time.time() test_calculator_base({}) print(f'finished", "Recipe(recipe_name=target_recipe) cat = recipe_obj.get_category() available_machine_list = production_machine_category_list_dict.get(cat) failed_dict['method_failed']['change_machine_failed'] = {}", "import Recipe from FactorioCalcBase.calculator_base import CalculatorBase from FactorioCalcBase.dependency_dict_common_function import dict_add_number", "try: test_obj = CalculatorBase(recipe_name=recipe, amount=amount, preferred_machine_list=preferred_machines, use_kovarex=use_kovarex, mining_research_modifier=mining_research_modifier) except: dict_add_number(failed_dict['init_failed'],", "'MachineNotChanged' def test_calculator_base_methods(test_obj: CalculatorBase, failed_dict: dict): recipe_list = list(test_obj.block_obj_dict['recipe'].keys()) for", "preferred_machine_list=preferred_machines, use_kovarex=use_kovarex, mining_research_modifier=mining_research_modifier) except: dict_add_number(failed_dict['init_failed'], key=recipe, val=1) test_calculator_base_methods(test_obj, failed_dict) pprint.pp(failed_dict)", "} for recipe in sorted_recipe_list: for mining_research_modifier in mrms: for", "= recipe_obj.get_category() available_machine_list = production_machine_category_list_dict.get(cat) failed_dict['method_failed']['change_machine_failed'] = {} if len(available_machine_list)", "import dict_add_number import time def test_change_machine(test_obj: CalculatorBase, target_recipe, failed_dict): recipe_obj", "[True, False] am = [1, 101.5] failed_dict['init_failed'] = {} failed_dict['method_failed']", "= [0, 0.3] pm = [None, [\"assembling-machine-2\", \"stone-furnace\", \"burner-mining-drill\"]] uk", "dict): recipe_list = list(test_obj.block_obj_dict['recipe'].keys()) for recipe in recipe_list: try: 
test_change_machine(test_obj,", "} } for recipe in sorted_recipe_list: for mining_research_modifier in mrms:", "= list(test_obj.block_obj_dict['recipe'].keys()) for recipe in recipe_list: try: test_change_machine(test_obj, recipe, failed_dict)", "False] am = [1, 101.5] failed_dict['init_failed'] = {} failed_dict['method_failed'] =", "recipe in sorted_recipe_list: for mining_research_modifier in mrms: for preferred_machines in", "from FactorioCalcBase.data.binary import sorted_recipe_list, production_machine_category_list_dict from FactorioCalcBase.recipe import Recipe from", "test_obj.change_machine_to_specific_block(recipe_name=target_recipe, machine_name=machine) if test_obj.block_obj_dict['recipe']['machine_name'] != machine: raise 'MachineNotChanged' def test_calculator_base_methods(test_obj:", "pm = [None, [\"assembling-machine-2\", \"stone-furnace\", \"burner-mining-drill\"]] uk = [True, False]", "dict_add_number(failed_dict['init_failed'], key=recipe, val=1) test_calculator_base_methods(test_obj, failed_dict) pprint.pp(failed_dict) return failed_dict def run_test():", "in available_machine_list: test_obj.change_machine_to_specific_block(recipe_name=target_recipe, machine_name=machine) if test_obj.block_obj_dict['recipe']['machine_name'] != machine: raise 'MachineNotChanged'", "!= machine: raise 'MachineNotChanged' def test_calculator_base_methods(test_obj: CalculatorBase, failed_dict: dict): recipe_list", "try: test_change_machine(test_obj, recipe, failed_dict) except: dict_add_number(failed_dict['method_failed']['change_machine_failed'], recipe, 1) def test_calculator_base(failed_dict):", "import CalculatorBase from FactorioCalcBase.dependency_dict_common_function import dict_add_number import time def test_change_machine(test_obj:", "CalculatorBase, target_recipe, failed_dict): recipe_obj = Recipe(recipe_name=target_recipe) cat = recipe_obj.get_category() available_machine_list", "in mrms: for preferred_machines in pm: for use_kovarex in uk:", "= [True, 
False] am = [1, 101.5] failed_dict['init_failed'] = {}", "recipe_list = list(test_obj.block_obj_dict['recipe'].keys()) for recipe in recipe_list: try: test_change_machine(test_obj, recipe,", "= production_machine_category_list_dict.get(cat) failed_dict['method_failed']['change_machine_failed'] = {} if len(available_machine_list) > 1: for", "test_calculator_base(failed_dict): mrms = [0, 0.3] pm = [None, [\"assembling-machine-2\", \"stone-furnace\",", "for recipe in sorted_recipe_list: for mining_research_modifier in mrms: for preferred_machines", "in uk: for amount in am: try: test_obj = CalculatorBase(recipe_name=recipe,", "for machine in available_machine_list: test_obj.change_machine_to_specific_block(recipe_name=target_recipe, machine_name=machine) if test_obj.block_obj_dict['recipe']['machine_name'] != machine:", "for use_kovarex in uk: for amount in am: try: test_obj", "mrms = [0, 0.3] pm = [None, [\"assembling-machine-2\", \"stone-furnace\", \"burner-mining-drill\"]]", "FactorioCalcBase.data.binary import sorted_recipe_list, production_machine_category_list_dict from FactorioCalcBase.recipe import Recipe from FactorioCalcBase.calculator_base", "sorted_recipe_list, production_machine_category_list_dict from FactorioCalcBase.recipe import Recipe from FactorioCalcBase.calculator_base import CalculatorBase", "test_obj.block_obj_dict['recipe']['machine_name'] != machine: raise 'MachineNotChanged' def test_calculator_base_methods(test_obj: CalculatorBase, failed_dict: dict):", "= [None, [\"assembling-machine-2\", \"stone-furnace\", \"burner-mining-drill\"]] uk = [True, False] am", "101.5] failed_dict['init_failed'] = {} failed_dict['method_failed'] = { 'change_machine_failed': { }", "in pm: for use_kovarex in uk: for amount in am:", "available_machine_list: test_obj.change_machine_to_specific_block(recipe_name=target_recipe, machine_name=machine) if test_obj.block_obj_dict['recipe']['machine_name'] != machine: raise 'MachineNotChanged' def", "for amount in am: try: 
test_obj = CalculatorBase(recipe_name=recipe, amount=amount, preferred_machine_list=preferred_machines,", "in am: try: test_obj = CalculatorBase(recipe_name=recipe, amount=amount, preferred_machine_list=preferred_machines, use_kovarex=use_kovarex, mining_research_modifier=mining_research_modifier)", "recipe, 1) def test_calculator_base(failed_dict): mrms = [0, 0.3] pm =", "{} failed_dict['method_failed'] = { 'change_machine_failed': { } } for recipe", "uk: for amount in am: try: test_obj = CalculatorBase(recipe_name=recipe, amount=amount,", "amount=amount, preferred_machine_list=preferred_machines, use_kovarex=use_kovarex, mining_research_modifier=mining_research_modifier) except: dict_add_number(failed_dict['init_failed'], key=recipe, val=1) test_calculator_base_methods(test_obj, failed_dict)", "mining_research_modifier in mrms: for preferred_machines in pm: for use_kovarex in", "def test_change_machine(test_obj: CalculatorBase, target_recipe, failed_dict): recipe_obj = Recipe(recipe_name=target_recipe) cat =", "machine in available_machine_list: test_obj.change_machine_to_specific_block(recipe_name=target_recipe, machine_name=machine) if test_obj.block_obj_dict['recipe']['machine_name'] != machine: raise", "= CalculatorBase(recipe_name=recipe, amount=amount, preferred_machine_list=preferred_machines, use_kovarex=use_kovarex, mining_research_modifier=mining_research_modifier) except: dict_add_number(failed_dict['init_failed'], key=recipe, val=1)", "\"burner-mining-drill\"]] uk = [True, False] am = [1, 101.5] failed_dict['init_failed']", "recipe, failed_dict) except: dict_add_number(failed_dict['method_failed']['change_machine_failed'], recipe, 1) def test_calculator_base(failed_dict): mrms =", "machine: raise 'MachineNotChanged' def test_calculator_base_methods(test_obj: CalculatorBase, failed_dict: dict): recipe_list =", "from FactorioCalcBase.dependency_dict_common_function import dict_add_number import time def test_change_machine(test_obj: CalculatorBase, 
target_recipe,", "{} if len(available_machine_list) > 1: for machine in available_machine_list: test_obj.change_machine_to_specific_block(recipe_name=target_recipe,", "return failed_dict def run_test(): start_time = time.time() test_calculator_base({}) print(f'finished in", "use_kovarex=use_kovarex, mining_research_modifier=mining_research_modifier) except: dict_add_number(failed_dict['init_failed'], key=recipe, val=1) test_calculator_base_methods(test_obj, failed_dict) pprint.pp(failed_dict) return", "cat = recipe_obj.get_category() available_machine_list = production_machine_category_list_dict.get(cat) failed_dict['method_failed']['change_machine_failed'] = {} if", "available_machine_list = production_machine_category_list_dict.get(cat) failed_dict['method_failed']['change_machine_failed'] = {} if len(available_machine_list) > 1:", "for mining_research_modifier in mrms: for preferred_machines in pm: for use_kovarex", "raise 'MachineNotChanged' def test_calculator_base_methods(test_obj: CalculatorBase, failed_dict: dict): recipe_list = list(test_obj.block_obj_dict['recipe'].keys())", "recipe in recipe_list: try: test_change_machine(test_obj, recipe, failed_dict) except: dict_add_number(failed_dict['method_failed']['change_machine_failed'], recipe,", "test_change_machine(test_obj: CalculatorBase, target_recipe, failed_dict): recipe_obj = Recipe(recipe_name=target_recipe) cat = recipe_obj.get_category()", "in recipe_list: try: test_change_machine(test_obj, recipe, failed_dict) except: dict_add_number(failed_dict['method_failed']['change_machine_failed'], recipe, 1)", "except: dict_add_number(failed_dict['init_failed'], key=recipe, val=1) test_calculator_base_methods(test_obj, failed_dict) pprint.pp(failed_dict) return failed_dict def", "FactorioCalcBase.dependency_dict_common_function import dict_add_number import time def test_change_machine(test_obj: CalculatorBase, target_recipe, failed_dict):", "[None, [\"assembling-machine-2\", \"stone-furnace\", 
\"burner-mining-drill\"]] uk = [True, False] am =", "Recipe from FactorioCalcBase.calculator_base import CalculatorBase from FactorioCalcBase.dependency_dict_common_function import dict_add_number import", "test_change_machine(test_obj, recipe, failed_dict) except: dict_add_number(failed_dict['method_failed']['change_machine_failed'], recipe, 1) def test_calculator_base(failed_dict): mrms", "CalculatorBase(recipe_name=recipe, amount=amount, preferred_machine_list=preferred_machines, use_kovarex=use_kovarex, mining_research_modifier=mining_research_modifier) except: dict_add_number(failed_dict['init_failed'], key=recipe, val=1) test_calculator_base_methods(test_obj,", "def test_calculator_base_methods(test_obj: CalculatorBase, failed_dict: dict): recipe_list = list(test_obj.block_obj_dict['recipe'].keys()) for recipe", "dict_add_number import time def test_change_machine(test_obj: CalculatorBase, target_recipe, failed_dict): recipe_obj =", "FactorioCalcBase.recipe import Recipe from FactorioCalcBase.calculator_base import CalculatorBase from FactorioCalcBase.dependency_dict_common_function import", "FactorioCalcBase.calculator_base import CalculatorBase from FactorioCalcBase.dependency_dict_common_function import dict_add_number import time def", "{ 'change_machine_failed': { } } for recipe in sorted_recipe_list: for", "list(test_obj.block_obj_dict['recipe'].keys()) for recipe in recipe_list: try: test_change_machine(test_obj, recipe, failed_dict) except:", "am: try: test_obj = CalculatorBase(recipe_name=recipe, amount=amount, preferred_machine_list=preferred_machines, use_kovarex=use_kovarex, mining_research_modifier=mining_research_modifier) except:", "CalculatorBase from FactorioCalcBase.dependency_dict_common_function import dict_add_number import time def test_change_machine(test_obj: CalculatorBase,", "machine_name=machine) if test_obj.block_obj_dict['recipe']['machine_name'] != machine: raise 'MachineNotChanged' def test_calculator_base_methods(test_obj: 
CalculatorBase,", "len(available_machine_list) > 1: for machine in available_machine_list: test_obj.change_machine_to_specific_block(recipe_name=target_recipe, machine_name=machine) if", "preferred_machines in pm: for use_kovarex in uk: for amount in", "mining_research_modifier=mining_research_modifier) except: dict_add_number(failed_dict['init_failed'], key=recipe, val=1) test_calculator_base_methods(test_obj, failed_dict) pprint.pp(failed_dict) return failed_dict", "recipe_list: try: test_change_machine(test_obj, recipe, failed_dict) except: dict_add_number(failed_dict['method_failed']['change_machine_failed'], recipe, 1) def", "for recipe in recipe_list: try: test_change_machine(test_obj, recipe, failed_dict) except: dict_add_number(failed_dict['method_failed']['change_machine_failed'],", "import sorted_recipe_list, production_machine_category_list_dict from FactorioCalcBase.recipe import Recipe from FactorioCalcBase.calculator_base import", "= Recipe(recipe_name=target_recipe) cat = recipe_obj.get_category() available_machine_list = production_machine_category_list_dict.get(cat) failed_dict['method_failed']['change_machine_failed'] =", "am = [1, 101.5] failed_dict['init_failed'] = {} failed_dict['method_failed'] = {", "= {} failed_dict['method_failed'] = { 'change_machine_failed': { } } for", "failed_dict['method_failed'] = { 'change_machine_failed': { } } for recipe in", "key=recipe, val=1) test_calculator_base_methods(test_obj, failed_dict) pprint.pp(failed_dict) return failed_dict def run_test(): start_time", "[1, 101.5] failed_dict['init_failed'] = {} failed_dict['method_failed'] = { 'change_machine_failed': {" ]
[ "elif self.type == UNKNOWN_METHOD: return 'Unknown method' elif self.type ==", "2 EXCEPTION = 3 class TProcessor: \"\"\"Base class for procsessor,", "True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP:", "oprot): pass class TException(Exception): \"\"\"Base class for all thrift exceptions.\"\"\"", "11 STRUCT = 12 MAP = 13 SET = 14", "# Distributed under the Thrift Software License # # See", "= 11 UTF7 = 11 STRUCT = 12 MAP =", "oprot.writeString(self.message) oprot.writeFieldEnd() if self.type != None: oprot.writeFieldBegin('type', TType.I32, 2) oprot.writeI32(self.type)", "self.message: return self.message elif self.type == UNKNOWN_METHOD: return 'Unknown method'", "'Unknown method' elif self.type == INVALID_MESSAGE_TYPE: return 'Invalid message type'", "TProcessor: \"\"\"Base class for procsessor, which works on two streams.\"\"\"", "self.type == UNKNOWN_METHOD: return 'Unknown method' elif self.type == INVALID_MESSAGE_TYPE:", "self.type == MISSING_RESULT: return 'Missing result' else: return 'Default (unknown)", "site at: # http://developers.facebook.com/thrift/ class TType: STOP = 0 VOID", "TType: STOP = 0 VOID = 1 BOOL = 2", "if self.message: return self.message elif self.type == UNKNOWN_METHOD: return 'Unknown", "(fname, ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break", "\"\"\"Application level thrift exceptions.\"\"\" UNKNOWN = 0 UNKNOWN_METHOD = 1", "Exception.__init__(self, message) self.message = message class TApplicationException(TException): \"\"\"Application level thrift", "type' elif self.type == WRONG_METHOD_NAME: return 'Wrong method name' elif", "Copyright (c) 2006- Facebook # Distributed under the Thrift Software", "3 BAD_SEQUENCE_ID = 4 MISSING_RESULT = 5 def __init__(self, type=UNKNOWN,", "iprot): iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if", "3 class TProcessor: \"\"\"Base class for procsessor, which works on", "WRONG_METHOD_NAME = 3 BAD_SEQUENCE_ID = 4 
MISSING_RESULT = 5 def", "Thrift site at: # http://developers.facebook.com/thrift/ class TType: STOP = 0", "\"\"\"Base class for procsessor, which works on two streams.\"\"\" def", "0 UNKNOWN_METHOD = 1 INVALID_MESSAGE_TYPE = 2 WRONG_METHOD_NAME = 3", "4 MISSING_RESULT = 5 def __init__(self, type=UNKNOWN, message=None): TException.__init__(self, message)", "sequence ID' elif self.type == MISSING_RESULT: return 'Missing result' else:", "else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): oprot.writeStructBegin('TApplicationException') if self.message", "if ftype == TType.STRING: self.message = iprot.readString(); else: iprot.skip(ftype) elif", "= 6 I32 = 8 I64 = 10 STRING =", "STRUCT = 12 MAP = 13 SET = 14 LIST", "visit the Thrift site at: # http://developers.facebook.com/thrift/ class TType: STOP", "= message class TApplicationException(TException): \"\"\"Application level thrift exceptions.\"\"\" UNKNOWN =", "I16 = 6 I32 = 8 I64 = 10 STRING", "= 11 STRUCT = 12 MAP = 13 SET =", "3 DOUBLE = 4 I16 = 6 I32 = 8", "UNKNOWN_METHOD = 1 INVALID_MESSAGE_TYPE = 2 WRONG_METHOD_NAME = 3 BAD_SEQUENCE_ID", "1) oprot.writeString(self.message) oprot.writeFieldEnd() if self.type != None: oprot.writeFieldBegin('type', TType.I32, 2)", "= 15 UTF8 = 16 UTF16 = 17 class TMessageType:", "= 17 class TMessageType: CALL = 1 REPLY = 2", "exceptions.\"\"\" def __init__(self, message=None): Exception.__init__(self, message) self.message = message class", "STRING = 11 UTF7 = 11 STRUCT = 12 MAP", "== BAD_SEQUENCE_ID: return 'Bad sequence ID' elif self.type == MISSING_RESULT:", "DOUBLE = 4 I16 = 6 I32 = 8 I64", "def read(self, iprot): iprot.readStructBegin() while True: (fname, ftype, fid) =", "'Wrong method name' elif self.type == BAD_SEQUENCE_ID: return 'Bad sequence", "iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): oprot.writeStructBegin('TApplicationException') if", "else: return 'Default 
(unknown) TApplicationException' def read(self, iprot): iprot.readStructBegin() while", "'Default (unknown) TApplicationException' def read(self, iprot): iprot.readStructBegin() while True: (fname,", "process(iprot, oprot): pass class TException(Exception): \"\"\"Base class for all thrift", "return 'Missing result' else: return 'Default (unknown) TApplicationException' def read(self,", "License # # See accompanying file LICENSE or visit the", "VOID = 1 BOOL = 2 BYTE = 3 I08", "iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): oprot.writeStructBegin('TApplicationException') if self.message !=", "under the Thrift Software License # # See accompanying file", "1 INVALID_MESSAGE_TYPE = 2 WRONG_METHOD_NAME = 3 BAD_SEQUENCE_ID = 4", "2 BYTE = 3 I08 = 3 DOUBLE = 4", "message=None): TException.__init__(self, message) self.type = type def __str__(self): if self.message:", "== 2: if ftype == TType.I32: self.type = iprot.readI32(); else:", "iprot.readStructEnd() def write(self, oprot): oprot.writeStructBegin('TApplicationException') if self.message != None: oprot.writeFieldBegin('message',", "14 LIST = 15 UTF8 = 16 UTF16 = 17", "5 def __init__(self, type=UNKNOWN, message=None): TException.__init__(self, message) self.type = type", "TException(Exception): \"\"\"Base class for all thrift exceptions.\"\"\" def __init__(self, message=None):", "oprot.writeStructBegin('TApplicationException') if self.message != None: oprot.writeFieldBegin('message', TType.STRING, 1) oprot.writeString(self.message) oprot.writeFieldEnd()", "I64 = 10 STRING = 11 UTF7 = 11 STRUCT", "\"\"\"Base class for all thrift exceptions.\"\"\" def __init__(self, message=None): Exception.__init__(self,", "class TException(Exception): \"\"\"Base class for all thrift exceptions.\"\"\" def __init__(self,", "else: iprot.skip(ftype) elif fid == 2: if ftype == TType.I32:", "if self.message != None: oprot.writeFieldBegin('message', TType.STRING, 1) oprot.writeString(self.message) 
oprot.writeFieldEnd() if", "TType.I32: self.type = iprot.readI32(); else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd()", "= iprot.readString(); else: iprot.skip(ftype) elif fid == 2: if ftype", "0 VOID = 1 BOOL = 2 BYTE = 3", "8 I64 = 10 STRING = 11 UTF7 = 11", "'Missing result' else: return 'Default (unknown) TApplicationException' def read(self, iprot):", "ftype == TType.STRING: self.message = iprot.readString(); else: iprot.skip(ftype) elif fid", "= 3 BAD_SEQUENCE_ID = 4 MISSING_RESULT = 5 def __init__(self,", "def __init__(self, type=UNKNOWN, message=None): TException.__init__(self, message) self.type = type def", "== TType.STRING: self.message = iprot.readString(); else: iprot.skip(ftype) elif fid ==", "self.message != None: oprot.writeFieldBegin('message', TType.STRING, 1) oprot.writeString(self.message) oprot.writeFieldEnd() if self.type", "Facebook # Distributed under the Thrift Software License # #", "message) self.type = type def __str__(self): if self.message: return self.message", "return 'Unknown method' elif self.type == INVALID_MESSAGE_TYPE: return 'Invalid message", "accompanying file LICENSE or visit the Thrift site at: #", "two streams.\"\"\" def process(iprot, oprot): pass class TException(Exception): \"\"\"Base class", "iprot.readFieldBegin() if ftype == TType.STOP: break if fid == 1:", "oprot): oprot.writeStructBegin('TApplicationException') if self.message != None: oprot.writeFieldBegin('message', TType.STRING, 1) oprot.writeString(self.message)", "if self.type != None: oprot.writeFieldBegin('type', TType.I32, 2) oprot.writeI32(self.type) oprot.writeFieldEnd() oprot.writeFieldStop()", "oprot.writeFieldEnd() if self.type != None: oprot.writeFieldBegin('type', TType.I32, 2) oprot.writeI32(self.type) oprot.writeFieldEnd()", "TType.STRING: self.message = iprot.readString(); else: iprot.skip(ftype) elif fid == 2:", "Distributed under the Thrift Software License # # See accompanying", "message=None): 
Exception.__init__(self, message) self.message = message class TApplicationException(TException): \"\"\"Application level", "read(self, iprot): iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin()", "REPLY = 2 EXCEPTION = 3 class TProcessor: \"\"\"Base class", "name' elif self.type == BAD_SEQUENCE_ID: return 'Bad sequence ID' elif", "6 I32 = 8 I64 = 10 STRING = 11", "= 1 REPLY = 2 EXCEPTION = 3 class TProcessor:", "1: if ftype == TType.STRING: self.message = iprot.readString(); else: iprot.skip(ftype)", "= 3 DOUBLE = 4 I16 = 6 I32 =", "all thrift exceptions.\"\"\" def __init__(self, message=None): Exception.__init__(self, message) self.message =", "__init__(self, message=None): Exception.__init__(self, message) self.message = message class TApplicationException(TException): \"\"\"Application", "exceptions.\"\"\" UNKNOWN = 0 UNKNOWN_METHOD = 1 INVALID_MESSAGE_TYPE = 2", "type=UNKNOWN, message=None): TException.__init__(self, message) self.type = type def __str__(self): if", "elif fid == 2: if ftype == TType.I32: self.type =", "iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): oprot.writeStructBegin('TApplicationException') if self.message != None:", "pass class TException(Exception): \"\"\"Base class for all thrift exceptions.\"\"\" def", "2 WRONG_METHOD_NAME = 3 BAD_SEQUENCE_ID = 4 MISSING_RESULT = 5", "CALL = 1 REPLY = 2 EXCEPTION = 3 class", "method' elif self.type == INVALID_MESSAGE_TYPE: return 'Invalid message type' elif", "== WRONG_METHOD_NAME: return 'Wrong method name' elif self.type == BAD_SEQUENCE_ID:", "ftype == TType.STOP: break if fid == 1: if ftype", "= 2 EXCEPTION = 3 class TProcessor: \"\"\"Base class for", "class for all thrift exceptions.\"\"\" def __init__(self, message=None): Exception.__init__(self, message)", "TApplicationException(TException): \"\"\"Application level thrift exceptions.\"\"\" UNKNOWN = 0 UNKNOWN_METHOD =", "INVALID_MESSAGE_TYPE = 2 WRONG_METHOD_NAME = 3 BAD_SEQUENCE_ID = 4 
MISSING_RESULT", "!= None: oprot.writeFieldBegin('message', TType.STRING, 1) oprot.writeString(self.message) oprot.writeFieldEnd() if self.type !=", "17 class TMessageType: CALL = 1 REPLY = 2 EXCEPTION", "elif self.type == MISSING_RESULT: return 'Missing result' else: return 'Default", "self.message = iprot.readString(); else: iprot.skip(ftype) elif fid == 2: if", "if fid == 1: if ftype == TType.STRING: self.message =", "13 SET = 14 LIST = 15 UTF8 = 16", "BAD_SEQUENCE_ID = 4 MISSING_RESULT = 5 def __init__(self, type=UNKNOWN, message=None):", "TApplicationException' def read(self, iprot): iprot.readStructBegin() while True: (fname, ftype, fid)", "= 5 def __init__(self, type=UNKNOWN, message=None): TException.__init__(self, message) self.type =", "class TApplicationException(TException): \"\"\"Application level thrift exceptions.\"\"\" UNKNOWN = 0 UNKNOWN_METHOD", "2: if ftype == TType.I32: self.type = iprot.readI32(); else: iprot.skip(ftype)", "3 I08 = 3 DOUBLE = 4 I16 = 6", "self.type = type def __str__(self): if self.message: return self.message elif", "MAP = 13 SET = 14 LIST = 15 UTF8", "= 2 BYTE = 3 I08 = 3 DOUBLE =", "self.type == INVALID_MESSAGE_TYPE: return 'Invalid message type' elif self.type ==", "MISSING_RESULT: return 'Missing result' else: return 'Default (unknown) TApplicationException' def", "== 1: if ftype == TType.STRING: self.message = iprot.readString(); else:", "def __init__(self, message=None): Exception.__init__(self, message) self.message = message class TApplicationException(TException):", "__str__(self): if self.message: return self.message elif self.type == UNKNOWN_METHOD: return", "SET = 14 LIST = 15 UTF8 = 16 UTF16", "self.message elif self.type == UNKNOWN_METHOD: return 'Unknown method' elif self.type", "else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot): oprot.writeStructBegin('TApplicationException')", "None: oprot.writeFieldBegin('message', TType.STRING, 1) 
oprot.writeString(self.message) oprot.writeFieldEnd() if self.type != None:", "break if fid == 1: if ftype == TType.STRING: self.message", "self.message = message class TApplicationException(TException): \"\"\"Application level thrift exceptions.\"\"\" UNKNOWN", "http://developers.facebook.com/thrift/ class TType: STOP = 0 VOID = 1 BOOL", "for procsessor, which works on two streams.\"\"\" def process(iprot, oprot):", "for all thrift exceptions.\"\"\" def __init__(self, message=None): Exception.__init__(self, message) self.message", "level thrift exceptions.\"\"\" UNKNOWN = 0 UNKNOWN_METHOD = 1 INVALID_MESSAGE_TYPE", "INVALID_MESSAGE_TYPE: return 'Invalid message type' elif self.type == WRONG_METHOD_NAME: return", "= iprot.readFieldBegin() if ftype == TType.STOP: break if fid ==", "if ftype == TType.I32: self.type = iprot.readI32(); else: iprot.skip(ftype) else:", "iprot.readI32(); else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self, oprot):", "'Invalid message type' elif self.type == WRONG_METHOD_NAME: return 'Wrong method", "self.type == WRONG_METHOD_NAME: return 'Wrong method name' elif self.type ==", "class TType: STOP = 0 VOID = 1 BOOL =", "WRONG_METHOD_NAME: return 'Wrong method name' elif self.type == BAD_SEQUENCE_ID: return", "elif self.type == INVALID_MESSAGE_TYPE: return 'Invalid message type' elif self.type", "thrift exceptions.\"\"\" def __init__(self, message=None): Exception.__init__(self, message) self.message = message", "thrift exceptions.\"\"\" UNKNOWN = 0 UNKNOWN_METHOD = 1 INVALID_MESSAGE_TYPE =", "= 12 MAP = 13 SET = 14 LIST =", "TMessageType: CALL = 1 REPLY = 2 EXCEPTION = 3", "MISSING_RESULT = 5 def __init__(self, type=UNKNOWN, message=None): TException.__init__(self, message) self.type", "EXCEPTION = 3 class TProcessor: \"\"\"Base class for procsessor, which", "== TType.STOP: break if fid == 1: if ftype ==", "fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if fid", "message 
type' elif self.type == WRONG_METHOD_NAME: return 'Wrong method name'", "oprot.writeFieldBegin('message', TType.STRING, 1) oprot.writeString(self.message) oprot.writeFieldEnd() if self.type != None: oprot.writeFieldBegin('type',", "message class TApplicationException(TException): \"\"\"Application level thrift exceptions.\"\"\" UNKNOWN = 0", "which works on two streams.\"\"\" def process(iprot, oprot): pass class", "= iprot.readI32(); else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def write(self,", "UTF7 = 11 STRUCT = 12 MAP = 13 SET", "= 2 WRONG_METHOD_NAME = 3 BAD_SEQUENCE_ID = 4 MISSING_RESULT =", "return 'Invalid message type' elif self.type == WRONG_METHOD_NAME: return 'Wrong", "BAD_SEQUENCE_ID: return 'Bad sequence ID' elif self.type == MISSING_RESULT: return", "type def __str__(self): if self.message: return self.message elif self.type ==", "= 8 I64 = 10 STRING = 11 UTF7 =", "works on two streams.\"\"\" def process(iprot, oprot): pass class TException(Exception):", "return self.message elif self.type == UNKNOWN_METHOD: return 'Unknown method' elif", "streams.\"\"\" def process(iprot, oprot): pass class TException(Exception): \"\"\"Base class for", "result' else: return 'Default (unknown) TApplicationException' def read(self, iprot): iprot.readStructBegin()", "__init__(self, type=UNKNOWN, message=None): TException.__init__(self, message) self.type = type def __str__(self):", "on two streams.\"\"\" def process(iprot, oprot): pass class TException(Exception): \"\"\"Base", "the Thrift site at: # http://developers.facebook.com/thrift/ class TType: STOP =", "= 0 VOID = 1 BOOL = 2 BYTE =", "(c) 2006- Facebook # Distributed under the Thrift Software License", "UNKNOWN = 0 UNKNOWN_METHOD = 1 INVALID_MESSAGE_TYPE = 2 WRONG_METHOD_NAME", "the Thrift Software License # # See accompanying file LICENSE", "# See accompanying file LICENSE or visit the Thrift site", "BYTE = 3 I08 = 3 DOUBLE = 4 I16", "'Bad sequence ID' elif 
self.type == MISSING_RESULT: return 'Missing result'", "15 UTF8 = 16 UTF16 = 17 class TMessageType: CALL", "return 'Default (unknown) TApplicationException' def read(self, iprot): iprot.readStructBegin() while True:", "# # See accompanying file LICENSE or visit the Thrift", "return 'Wrong method name' elif self.type == BAD_SEQUENCE_ID: return 'Bad", "def __str__(self): if self.message: return self.message elif self.type == UNKNOWN_METHOD:", "I08 = 3 DOUBLE = 4 I16 = 6 I32", "return 'Bad sequence ID' elif self.type == MISSING_RESULT: return 'Missing", "self.type = iprot.readI32(); else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd() iprot.readStructEnd() def", "procsessor, which works on two streams.\"\"\" def process(iprot, oprot): pass", "method name' elif self.type == BAD_SEQUENCE_ID: return 'Bad sequence ID'", "1 REPLY = 2 EXCEPTION = 3 class TProcessor: \"\"\"Base", "TException.__init__(self, message) self.type = type def __str__(self): if self.message: return", "= 14 LIST = 15 UTF8 = 16 UTF16 =", "UNKNOWN_METHOD: return 'Unknown method' elif self.type == INVALID_MESSAGE_TYPE: return 'Invalid", "file LICENSE or visit the Thrift site at: # http://developers.facebook.com/thrift/", "Software License # # See accompanying file LICENSE or visit", "= 13 SET = 14 LIST = 15 UTF8 =", "16 UTF16 = 17 class TMessageType: CALL = 1 REPLY", "== INVALID_MESSAGE_TYPE: return 'Invalid message type' elif self.type == WRONG_METHOD_NAME:", "== TType.I32: self.type = iprot.readI32(); else: iprot.skip(ftype) else: iprot.skip(ftype) iprot.readFieldEnd()", "== UNKNOWN_METHOD: return 'Unknown method' elif self.type == INVALID_MESSAGE_TYPE: return", "if ftype == TType.STOP: break if fid == 1: if", "fid == 2: if ftype == TType.I32: self.type = iprot.readI32();", "or visit the Thrift site at: # http://developers.facebook.com/thrift/ class TType:", "iprot.skip(ftype) elif fid == 2: if ftype == TType.I32: self.type", "= type def __str__(self): if self.message: return 
self.message elif self.type", "STOP = 0 VOID = 1 BOOL = 2 BYTE", "LIST = 15 UTF8 = 16 UTF16 = 17 class", "BOOL = 2 BYTE = 3 I08 = 3 DOUBLE", "= 0 UNKNOWN_METHOD = 1 INVALID_MESSAGE_TYPE = 2 WRONG_METHOD_NAME =", "LICENSE or visit the Thrift site at: # http://developers.facebook.com/thrift/ class", "11 UTF7 = 11 STRUCT = 12 MAP = 13", "at: # http://developers.facebook.com/thrift/ class TType: STOP = 0 VOID =", "= 16 UTF16 = 17 class TMessageType: CALL = 1", "ftype, fid) = iprot.readFieldBegin() if ftype == TType.STOP: break if", "(unknown) TApplicationException' def read(self, iprot): iprot.readStructBegin() while True: (fname, ftype,", "self.type != None: oprot.writeFieldBegin('type', TType.I32, 2) oprot.writeI32(self.type) oprot.writeFieldEnd() oprot.writeFieldStop() oprot.writeStructEnd()", "= 4 MISSING_RESULT = 5 def __init__(self, type=UNKNOWN, message=None): TException.__init__(self,", "iprot.readString(); else: iprot.skip(ftype) elif fid == 2: if ftype ==", "= 3 class TProcessor: \"\"\"Base class for procsessor, which works", "I32 = 8 I64 = 10 STRING = 11 UTF7", "while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype ==", "elif self.type == BAD_SEQUENCE_ID: return 'Bad sequence ID' elif self.type", "write(self, oprot): oprot.writeStructBegin('TApplicationException') if self.message != None: oprot.writeFieldBegin('message', TType.STRING, 1)", "def process(iprot, oprot): pass class TException(Exception): \"\"\"Base class for all", "ID' elif self.type == MISSING_RESULT: return 'Missing result' else: return", "# http://developers.facebook.com/thrift/ class TType: STOP = 0 VOID = 1", "10 STRING = 11 UTF7 = 11 STRUCT = 12", "def write(self, oprot): oprot.writeStructBegin('TApplicationException') if self.message != None: oprot.writeFieldBegin('message', TType.STRING,", "# Copyright (c) 2006- Facebook # Distributed under the Thrift", "1 BOOL = 2 BYTE = 3 I08 = 3", "== MISSING_RESULT: return 'Missing result' else: return 'Default (unknown) 
TApplicationException'", "fid == 1: if ftype == TType.STRING: self.message = iprot.readString();", "Thrift Software License # # See accompanying file LICENSE or", "UTF16 = 17 class TMessageType: CALL = 1 REPLY =", "= 4 I16 = 6 I32 = 8 I64 =", "class TProcessor: \"\"\"Base class for procsessor, which works on two", "message) self.message = message class TApplicationException(TException): \"\"\"Application level thrift exceptions.\"\"\"", "= 10 STRING = 11 UTF7 = 11 STRUCT =", "= 1 INVALID_MESSAGE_TYPE = 2 WRONG_METHOD_NAME = 3 BAD_SEQUENCE_ID =", "4 I16 = 6 I32 = 8 I64 = 10", "elif self.type == WRONG_METHOD_NAME: return 'Wrong method name' elif self.type", "TType.STOP: break if fid == 1: if ftype == TType.STRING:", "TType.STRING, 1) oprot.writeString(self.message) oprot.writeFieldEnd() if self.type != None: oprot.writeFieldBegin('type', TType.I32,", "UTF8 = 16 UTF16 = 17 class TMessageType: CALL =", "self.type == BAD_SEQUENCE_ID: return 'Bad sequence ID' elif self.type ==", "See accompanying file LICENSE or visit the Thrift site at:", "2006- Facebook # Distributed under the Thrift Software License #", "= 3 I08 = 3 DOUBLE = 4 I16 =", "class TMessageType: CALL = 1 REPLY = 2 EXCEPTION =", "iprot.readStructBegin() while True: (fname, ftype, fid) = iprot.readFieldBegin() if ftype", "12 MAP = 13 SET = 14 LIST = 15", "class for procsessor, which works on two streams.\"\"\" def process(iprot,", "ftype == TType.I32: self.type = iprot.readI32(); else: iprot.skip(ftype) else: iprot.skip(ftype)", "= 1 BOOL = 2 BYTE = 3 I08 =" ]
[ "False, False) detected = self._decoder.hyp() if detected: self._decoder.end_utt() self._decoder.start_utt() return", "Porcupine( library_path=os.path.join(self._repo_path, 'lib/linux/x86_64/libpv_porcupine.so'), model_path=os.path.join(self._repo_path, 'lib/common/porcupine_params.pv'), keyword_paths=[os.path.join(self._repo_path, 'resources/keyword_files/linux/%s_linux.ppn' % keyword.lower())], sensitivities=[sensitivity])", "2.0 (the \"License\"); # you may not use this file", "namedtuple from enum import Enum import numpy as np from", "== 1 def release(self): pass def __str__(self): return 'Snowboy' class", "engine is Engines.PORCUPINE: return PorcupineEngine(keyword, sensitivity) elif engine is Engines.SNOWBOY:", "is Engines.PORCUPINE: return PorcupineEngine(keyword, sensitivity) elif engine is Engines.SNOWBOY: return", "np.int16 self._decoder.process_raw(pcm.tobytes(), False, False) detected = self._decoder.hyp() if detected: self._decoder.end_utt()", "release(self): self._decoder.end_utt() def __str__(self): return 'PocketSphinx' class PorcupineEngine(Engine): def __init__(self,", "'cmudict-en-us.dict')) config.set_string('-keyphrase', keyword if keyword != 'snowboy' else 'snow boy')", "1, 0.1) else: raise ValueError(\"no sensitivity range for '%s'\", engine_type.value)", "from collections import namedtuple from enum import Enum import numpy", "create(engine, keyword, sensitivity): if engine is Engines.POCKET_SPHINX: return PocketSphinxEngine(keyword, sensitivity)", "model_relative_path).encode() resource_filename = os.path.join(os.path.dirname(__file__), 'engines/snowboy/resources/common.res').encode() self._snowboy = snowboydetect.SnowboyDetect(resource_filename=resource_filename, model_str=model_str) #", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "import numpy as np from pocketsphinx import get_model_path from pocketsphinx.pocketsphinx", "keyword_paths=[os.path.join(self._repo_path, 
'resources/keyword_files/linux/%s_linux.ppn' % keyword.lower())], sensitivities=[sensitivity]) def process(self, pcm): assert pcm.dtype", "== np.int16 return self._porcupine.process(pcm) == 0 def release(self): self._porcupine.delete() def", "False) detected = self._decoder.hyp() if detected: self._decoder.end_utt() self._decoder.start_utt() return detected", "else: self._snowboy.ApplyFrontend(False) def process(self, pcm): assert pcm.dtype == np.int16 return", "'%s'\", engine_type.value) @staticmethod def create(engine, keyword, sensitivity): if engine is", "of type '%s'\", engine.value) class PocketSphinxEngine(Engine): def __init__(self, keyword, sensitivity):", "as np from pocketsphinx import get_model_path from pocketsphinx.pocketsphinx import Decoder", "keyword.lower() model_relative_path = 'engines/nyumaya_audio_recognition/models/Hotword/%s_v1.0.0.premium' % keyword model_str = os.path.join(os.path.dirname(__file__), model_relative_path)", "is Engines.POCKET_SPHINX: return PocketSphinxEngine(keyword, sensitivity) elif engine is Engines.PORCUPINE: return", "model_relative_path) libpath=\"engines/nyumaya_audio_recognition/lib/linux_x86_64/libnyumaya_premium.so.1.0.0\" self._extractor = FeatureExtractor(libpath) self._detector = AudioRecognition(libpath) keywordId =", "'snow boy') config.set_float('-kws_threshold', 10 ** -sensitivity) self._decoder = Decoder(config) self._decoder.start_utt()", "AudioRecognition, FeatureExtractor class Engines(Enum): POCKET_SPHINX = 'PocketSphinx' PORCUPINE = 'Porcupine'", "return 'Snowboy' class NyumayaEngine(Engine): def __init__(self, keyword, sensitivity): #logging.info(\"INIT NYUMAYA\")", "engines import Porcupine from engines import snowboydetect from engines import", "0.1) else: raise ValueError(\"no sensitivity range for '%s'\", engine_type.value) @staticmethod", "use this file except in compliance with the License. 
#", "return detected def release(self): self._decoder.end_utt() def __str__(self): return 'PocketSphinx' class", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "License. # You may obtain a copy of the License", "return SensitivityInfo(0, 1, 0.1) else: raise ValueError(\"no sensitivity range for", "sensitivity range for '%s'\", engine_type.value) @staticmethod def create(engine, keyword, sensitivity):", "from pocketsphinx import get_model_path from pocketsphinx.pocketsphinx import Decoder from engines", "under the License is distributed on an \"AS IS\" BASIS,", "pcm): raise NotImplementedError() def release(self): raise NotImplementedError() def __str__(self): raise", "'engines/snowboy/resources/alexa/alexa-avs-sample-app/alexa.umdl' else: model_relative_path = 'engines/snowboy/resources/models/%s.umdl' % keyword.replace(' ', '_') model_str", "License for the specific language governing permissions and # limitations", "is Engines.SNOWBOY: return SnowboyEngine(keyword, sensitivity) elif engine is Engines.NYUMAYA: return", "10 ** -sensitivity) self._decoder = Decoder(config) self._decoder.start_utt() def process(self, pcm):", "from engines import AudioRecognition, FeatureExtractor class Engines(Enum): POCKET_SPHINX = 'PocketSphinx'", "def process(self, pcm): assert pcm.dtype == np.int16 return self._porcupine.process(pcm) ==", "<gh_stars>0 # # Copyright 2018 Picovoice Inc. 
# # Licensed", "'engines/snowboy/resources/common.res').encode() self._snowboy = snowboydetect.SnowboyDetect(resource_filename=resource_filename, model_str=model_str) # https://github.com/Kitt-AI/snowboy#pretrained-universal-models if keyword ==", "np.int16 #logging.info(len(pcm)) features = self._extractor.signalToMel(pcm.tobytes(),1.0) return self._detector.runDetection(features) == 1 def", "sensitivities=[sensitivity]) def process(self, pcm): assert pcm.dtype == np.int16 return self._porcupine.process(pcm)", "Engines.POCKET_SPHINX: return SensitivityInfo(-21, 15, 3) elif engine_type is Engines.PORCUPINE: return", "import Decoder from engines import Porcupine from engines import snowboydetect", "= 'engines/snowboy/resources/alexa/alexa-avs-sample-app/alexa.umdl' else: model_relative_path = 'engines/snowboy/resources/models/%s.umdl' % keyword.replace(' ', '_')", "raise NotImplementedError() @staticmethod def frame_length(engine_type): if engine_type is Engines.NYUMAYA: return", "in compliance with the License. # You may obtain a", "config.set_float('-kws_threshold', 10 ** -sensitivity) self._decoder = Decoder(config) self._decoder.start_utt() def process(self,", "# limitations under the License. 
# import os from collections", "software # distributed under the License is distributed on an", "def release(self): self._porcupine.delete() def __str__(self): return 'Porcupine' @property def _repo_path(self):", "1, 0.05) elif engine_type is Engines.NYUMAYA: return SensitivityInfo(0, 1, 0.1)", "elif engine_type is Engines.PORCUPINE: return SensitivityInfo(0, 1, 0.1) elif engine_type", "= self._detector.addModel(model_str,sensitivity) def process(self, pcm): assert pcm.dtype == np.int16 #logging.info(len(pcm))", "PorcupineEngine(Engine): def __init__(self, keyword, sensitivity): self._porcupine = Porcupine( library_path=os.path.join(self._repo_path, 'lib/linux/x86_64/libpv_porcupine.so'),", "keyword = keyword.lower() if keyword == 'alexa': model_relative_path = 'engines/snowboy/resources/alexa/alexa-avs-sample-app/alexa.umdl'", "if keyword == 'alexa': model_relative_path = 'engines/snowboy/resources/alexa/alexa-avs-sample-app/alexa.umdl' else: model_relative_path =", "return self._snowboy.RunDetection(pcm.tobytes()) == 1 def release(self): pass def __str__(self): return", "512 @staticmethod def sensitivity_info(engine_type): if engine_type is Engines.POCKET_SPHINX: return SensitivityInfo(-21,", "PorcupineEngine(keyword, sensitivity) elif engine is Engines.SNOWBOY: return SnowboyEngine(keyword, sensitivity) elif", "elif engine is Engines.SNOWBOY: return SnowboyEngine(keyword, sensitivity) elif engine is", "Engines.POCKET_SPHINX: return PocketSphinxEngine(keyword, sensitivity) elif engine is Engines.PORCUPINE: return PorcupineEngine(keyword,", "'alexa': model_relative_path = 'engines/snowboy/resources/alexa/alexa-avs-sample-app/alexa.umdl' else: model_relative_path = 'engines/snowboy/resources/models/%s.umdl' % keyword.replace('", "boy') config.set_float('-kws_threshold', 10 ** -sensitivity) self._decoder = Decoder(config) self._decoder.start_utt() def", "License. 
# import os from collections import namedtuple from enum", "== 0 def release(self): self._porcupine.delete() def __str__(self): return 'Porcupine' @property", "if engine_type is Engines.NYUMAYA: return 1600 else: return 512 @staticmethod", "= snowboydetect.SnowboyDetect(resource_filename=resource_filename, model_str=model_str) # https://github.com/Kitt-AI/snowboy#pretrained-universal-models if keyword == 'jarvis': self._snowboy.SetSensitivity(('%f,%f'", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "limitations under the License. # import os from collections import", "% keyword model_str = os.path.join(os.path.dirname(__file__), model_relative_path) libpath=\"engines/nyumaya_audio_recognition/lib/linux_x86_64/libnyumaya_premium.so.1.0.0\" self._extractor = FeatureExtractor(libpath)", "else: raise ValueError(\"no sensitivity range for '%s'\", engine_type.value) @staticmethod def", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "Engine(object): def process(self, pcm): raise NotImplementedError() def release(self): raise NotImplementedError()", "self._snowboy = snowboydetect.SnowboyDetect(resource_filename=resource_filename, model_str=model_str) # https://github.com/Kitt-AI/snowboy#pretrained-universal-models if keyword == 'jarvis':", "to in writing, software # distributed under the License is", "# See the License for the specific language governing permissions", "import AudioRecognition, FeatureExtractor class Engines(Enum): POCKET_SPHINX = 'PocketSphinx' PORCUPINE =", "@staticmethod def sensitivity_info(engine_type): if engine_type is Engines.POCKET_SPHINX: return SensitivityInfo(-21, 15,", "self._decoder = Decoder(config) self._decoder.start_utt() def process(self, pcm): assert pcm.dtype ==", "os.path.join(get_model_path(), 'en-us')) config.set_string('-dict', os.path.join(get_model_path(), 'cmudict-en-us.dict')) config.set_string('-keyphrase', keyword if keyword !=", "Porcupine from engines import snowboydetect from engines import AudioRecognition, FeatureExtractor", "process(self, pcm): assert pcm.dtype == np.int16 return self._porcupine.process(pcm) == 0", "language governing permissions and # limitations under the License. 
#", "class PorcupineEngine(Engine): def __init__(self, keyword, sensitivity): self._porcupine = Porcupine( library_path=os.path.join(self._repo_path,", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "return SensitivityInfo(0, 1, 0.05) elif engine_type is Engines.NYUMAYA: return SensitivityInfo(0,", "'lib/linux/x86_64/libpv_porcupine.so'), model_path=os.path.join(self._repo_path, 'lib/common/porcupine_params.pv'), keyword_paths=[os.path.join(self._repo_path, 'resources/keyword_files/linux/%s_linux.ppn' % keyword.lower())], sensitivities=[sensitivity]) def process(self,", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "with the License. # You may obtain a copy of", "engine_type is Engines.POCKET_SPHINX: return SensitivityInfo(-21, 15, 3) elif engine_type is", "for '%s'\", engine_type.value) @staticmethod def create(engine, keyword, sensitivity): if engine", "return SensitivityInfo(0, 1, 0.1) elif engine_type is Engines.SNOWBOY: return SensitivityInfo(0,", "keyword in {'alexa', 'computer', 'jarvis', 'view glass'}: self._snowboy.ApplyFrontend(True) else: self._snowboy.ApplyFrontend(False)", "SensitivityInfo = namedtuple('SensitivityInfo', 'min, max, step') class Engine(object): def process(self,", "Picovoice Inc. # # Licensed under the Apache License, Version", "def release(self): self._decoder.end_utt() def __str__(self): return 'PocketSphinx' class PorcupineEngine(Engine): def", "raise NotImplementedError() def release(self): raise NotImplementedError() def __str__(self): raise NotImplementedError()", "15, 3) elif engine_type is Engines.PORCUPINE: return SensitivityInfo(0, 1, 0.1)", "model_path=os.path.join(self._repo_path, 'lib/common/porcupine_params.pv'), keyword_paths=[os.path.join(self._repo_path, 'resources/keyword_files/linux/%s_linux.ppn' % keyword.lower())], sensitivities=[sensitivity]) def process(self, pcm):", "compliance with the License. 
# You may obtain a copy", "agreed to in writing, software # distributed under the License", "sensitivity): #logging.info(\"INIT NYUMAYA\") keyword = keyword.lower() model_relative_path = 'engines/nyumaya_audio_recognition/models/Hotword/%s_v1.0.0.premium' %", "__str__(self): raise NotImplementedError() @staticmethod def frame_length(engine_type): if engine_type is Engines.NYUMAYA:", "self._snowboy.RunDetection(pcm.tobytes()) == 1 def release(self): pass def __str__(self): return 'Snowboy'", "distributed under the License is distributed on an \"AS IS\"", "create engine of type '%s'\", engine.value) class PocketSphinxEngine(Engine): def __init__(self,", "np from pocketsphinx import get_model_path from pocketsphinx.pocketsphinx import Decoder from", "SensitivityInfo(-21, 15, 3) elif engine_type is Engines.PORCUPINE: return SensitivityInfo(0, 1,", "sensitivity_info(engine_type): if engine_type is Engines.POCKET_SPHINX: return SensitivityInfo(-21, 15, 3) elif", "if keyword == 'jarvis': self._snowboy.SetSensitivity(('%f,%f' % (sensitivity, sensitivity)).encode()) else: self._snowboy.SetSensitivity(str(sensitivity).encode())", "pcm.dtype == np.int16 return self._snowboy.RunDetection(pcm.tobytes()) == 1 def release(self): pass", "= Decoder.default_config() config.set_string('-logfn', '/dev/null') config.set_string('-hmm', os.path.join(get_model_path(), 'en-us')) config.set_string('-dict', os.path.join(get_model_path(), 'cmudict-en-us.dict'))", "express or implied. # See the License for the specific", "#logging.info(len(pcm)) features = self._extractor.signalToMel(pcm.tobytes(),1.0) return self._detector.runDetection(features) == 1 def release(self):", "except in compliance with the License. 
# You may obtain", "self._extractor.signalToMel(pcm.tobytes(),1.0) return self._detector.runDetection(features) == 1 def release(self): pass def __str__(self):", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "keyword, sensitivity): keyword = keyword.lower() if keyword == 'alexa': model_relative_path", "process(self, pcm): raise NotImplementedError() def release(self): raise NotImplementedError() def __str__(self):", "'Porcupine' @property def _repo_path(self): return os.path.join(os.path.dirname(__file__), 'engines/porcupine') class SnowboyEngine(Engine): def", "features = self._extractor.signalToMel(pcm.tobytes(),1.0) return self._detector.runDetection(features) == 1 def release(self): pass", "writing, software # distributed under the License is distributed on", "else: self._snowboy.SetSensitivity(str(sensitivity).encode()) if keyword in {'alexa', 'computer', 'jarvis', 'view glass'}:", "def __str__(self): raise NotImplementedError() @staticmethod def frame_length(engine_type): if engine_type is", "you may not use this file except in compliance with", "= self._decoder.hyp() if detected: self._decoder.end_utt() self._decoder.start_utt() return detected def release(self):", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "= self._extractor.signalToMel(pcm.tobytes(),1.0) return self._detector.runDetection(features) == 1 def release(self): pass def", "import snowboydetect from engines import AudioRecognition, FeatureExtractor class Engines(Enum): POCKET_SPHINX", "1, 0.1) elif engine_type is Engines.SNOWBOY: return SensitivityInfo(0, 1, 0.05)", "os.path.join(os.path.dirname(__file__), 'engines/porcupine') class SnowboyEngine(Engine): def __init__(self, keyword, sensitivity): keyword =", "elif engine is Engines.PORCUPINE: return PorcupineEngine(keyword, sensitivity) elif engine is", "range for '%s'\", engine_type.value) @staticmethod def create(engine, keyword, 
sensitivity): if", "else: return 512 @staticmethod def sensitivity_info(engine_type): if engine_type is Engines.POCKET_SPHINX:", "engine_type is Engines.NYUMAYA: return SensitivityInfo(0, 1, 0.1) else: raise ValueError(\"no", "collections import namedtuple from enum import Enum import numpy as", "= 'engines/snowboy/resources/models/%s.umdl' % keyword.replace(' ', '_') model_str = os.path.join(os.path.dirname(__file__), model_relative_path).encode()", "CONDITIONS OF ANY KIND, either express or implied. # See", "keyword, sensitivity): self._porcupine = Porcupine( library_path=os.path.join(self._repo_path, 'lib/linux/x86_64/libpv_porcupine.so'), model_path=os.path.join(self._repo_path, 'lib/common/porcupine_params.pv'), keyword_paths=[os.path.join(self._repo_path,", "_repo_path(self): return os.path.join(os.path.dirname(__file__), 'engines/porcupine') class SnowboyEngine(Engine): def __init__(self, keyword, sensitivity):", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "class NyumayaEngine(Engine): def __init__(self, keyword, sensitivity): #logging.info(\"INIT NYUMAYA\") keyword =", "Decoder.default_config() config.set_string('-logfn', '/dev/null') config.set_string('-hmm', os.path.join(get_model_path(), 'en-us')) config.set_string('-dict', os.path.join(get_model_path(), 'cmudict-en-us.dict')) config.set_string('-keyphrase',", "else 'snow boy') config.set_float('-kws_threshold', 10 ** -sensitivity) self._decoder = Decoder(config)", "config.set_string('-hmm', os.path.join(get_model_path(), 'en-us')) config.set_string('-dict', os.path.join(get_model_path(), 'cmudict-en-us.dict')) config.set_string('-keyphrase', keyword if keyword", "== np.int16 return self._snowboy.RunDetection(pcm.tobytes()) == 1 def release(self): pass def", "keyword == 'alexa': model_relative_path = 'engines/snowboy/resources/alexa/alexa-avs-sample-app/alexa.umdl' else: model_relative_path = 'engines/snowboy/resources/models/%s.umdl'", "is Engines.POCKET_SPHINX: return 
SensitivityInfo(-21, 15, 3) elif engine_type is Engines.PORCUPINE:", "engine is Engines.SNOWBOY: return SnowboyEngine(keyword, sensitivity) elif engine is Engines.NYUMAYA:", "config = Decoder.default_config() config.set_string('-logfn', '/dev/null') config.set_string('-hmm', os.path.join(get_model_path(), 'en-us')) config.set_string('-dict', os.path.join(get_model_path(),", "is Engines.PORCUPINE: return SensitivityInfo(0, 1, 0.1) elif engine_type is Engines.SNOWBOY:", "the License. # import os from collections import namedtuple from", "= 'Snowboy' NYUMAYA = 'Nyumaya' SensitivityInfo = namedtuple('SensitivityInfo', 'min, max,", "Engines.PORCUPINE: return SensitivityInfo(0, 1, 0.1) elif engine_type is Engines.SNOWBOY: return", "engine_type is Engines.PORCUPINE: return SensitivityInfo(0, 1, 0.1) elif engine_type is", "-sensitivity) self._decoder = Decoder(config) self._decoder.start_utt() def process(self, pcm): assert pcm.dtype", "self._porcupine.process(pcm) == 0 def release(self): self._porcupine.delete() def __str__(self): return 'Porcupine'", "0.1) elif engine_type is Engines.SNOWBOY: return SensitivityInfo(0, 1, 0.05) elif", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "self._decoder.end_utt() def __str__(self): return 'PocketSphinx' class PorcupineEngine(Engine): def __init__(self, keyword,", "Decoder from engines import Porcupine from engines import snowboydetect from", "if keyword in {'alexa', 'computer', 'jarvis', 'view glass'}: self._snowboy.ApplyFrontend(True) else:", "Engines(Enum): POCKET_SPHINX = 'PocketSphinx' PORCUPINE = 'Porcupine' SNOWBOY = 'Snowboy'", "3) elif engine_type is Engines.PORCUPINE: return SensitivityInfo(0, 1, 0.1) elif", "@staticmethod def frame_length(engine_type): if engine_type is Engines.NYUMAYA: return 1600 else:", "the License is distributed on an \"AS IS\" BASIS, #", "pcm): assert pcm.dtype == np.int16 #logging.info(len(pcm)) features = self._extractor.signalToMel(pcm.tobytes(),1.0) return", "{'alexa', 'computer', 'jarvis', 'view glass'}: self._snowboy.ApplyFrontend(True) else: self._snowboy.ApplyFrontend(False) def process(self,", "__str__(self): return 'Snowboy' class NyumayaEngine(Engine): def __init__(self, keyword, sensitivity): #logging.info(\"INIT", "model_relative_path = 'engines/snowboy/resources/models/%s.umdl' % keyword.replace(' ', '_') model_str = os.path.join(os.path.dirname(__file__),", "sensitivity) elif engine is Engines.PORCUPINE: return PorcupineEngine(keyword, sensitivity) elif engine", "keyword, sensitivity): if engine is Engines.POCKET_SPHINX: return PocketSphinxEngine(keyword, sensitivity) elif", "os.path.join(get_model_path(), 'cmudict-en-us.dict')) config.set_string('-keyphrase', keyword if keyword != 'snowboy' else 'snow", "frame_length(engine_type): if engine_type is Engines.NYUMAYA: return 1600 else: return 512", "__init__(self, keyword, sensitivity): self._porcupine = Porcupine( library_path=os.path.join(self._repo_path, 'lib/linux/x86_64/libpv_porcupine.so'), model_path=os.path.join(self._repo_path, 'lib/common/porcupine_params.pv'),", "self._decoder.start_utt() def process(self, pcm): assert pcm.dtype == np.int16 self._decoder.process_raw(pcm.tobytes(), False,", 
"keyword, sensitivity): config = Decoder.default_config() config.set_string('-logfn', '/dev/null') config.set_string('-hmm', os.path.join(get_model_path(), 'en-us'))", "SnowboyEngine(Engine): def __init__(self, keyword, sensitivity): keyword = keyword.lower() if keyword", "1600 else: return 512 @staticmethod def sensitivity_info(engine_type): if engine_type is", "model_relative_path = 'engines/nyumaya_audio_recognition/models/Hotword/%s_v1.0.0.premium' % keyword model_str = os.path.join(os.path.dirname(__file__), model_relative_path) libpath=\"engines/nyumaya_audio_recognition/lib/linux_x86_64/libnyumaya_premium.so.1.0.0\"", "np.int16 return self._porcupine.process(pcm) == 0 def release(self): self._porcupine.delete() def __str__(self):", "else: ValueError(\"cannot create engine of type '%s'\", engine.value) class PocketSphinxEngine(Engine):", "sensitivity): keyword = keyword.lower() if keyword == 'alexa': model_relative_path =", "def __str__(self): return 'Snowboy' class NyumayaEngine(Engine): def __init__(self, keyword, sensitivity):", "law or agreed to in writing, software # distributed under", "', '_') model_str = os.path.join(os.path.dirname(__file__), model_relative_path).encode() resource_filename = os.path.join(os.path.dirname(__file__), 'engines/snowboy/resources/common.res').encode()", "governing permissions and # limitations under the License. 
# import", "self._snowboy.ApplyFrontend(False) def process(self, pcm): assert pcm.dtype == np.int16 return self._snowboy.RunDetection(pcm.tobytes())", "enum import Enum import numpy as np from pocketsphinx import", "process(self, pcm): assert pcm.dtype == np.int16 self._decoder.process_raw(pcm.tobytes(), False, False) detected", "class Engine(object): def process(self, pcm): raise NotImplementedError() def release(self): raise", "pcm.dtype == np.int16 return self._porcupine.process(pcm) == 0 def release(self): self._porcupine.delete()", "resource_filename = os.path.join(os.path.dirname(__file__), 'engines/snowboy/resources/common.res').encode() self._snowboy = snowboydetect.SnowboyDetect(resource_filename=resource_filename, model_str=model_str) # https://github.com/Kitt-AI/snowboy#pretrained-universal-models", "is Engines.NYUMAYA: return SensitivityInfo(0, 1, 0.1) else: raise ValueError(\"no sensitivity", "ValueError(\"cannot create engine of type '%s'\", engine.value) class PocketSphinxEngine(Engine): def", "# # Copyright 2018 Picovoice Inc. 
# # Licensed under", "max, step') class Engine(object): def process(self, pcm): raise NotImplementedError() def", "libpath=\"engines/nyumaya_audio_recognition/lib/linux_x86_64/libnyumaya_premium.so.1.0.0\" self._extractor = FeatureExtractor(libpath) self._detector = AudioRecognition(libpath) keywordId = self._detector.addModel(model_str,sensitivity)", "sensitivity) elif engine is Engines.NYUMAYA: return NyumayaEngine(keyword, sensitivity) else: ValueError(\"cannot", "# import os from collections import namedtuple from enum import", "type '%s'\", engine.value) class PocketSphinxEngine(Engine): def __init__(self, keyword, sensitivity): config", "engine_type.value) @staticmethod def create(engine, keyword, sensitivity): if engine is Engines.POCKET_SPHINX:", "FeatureExtractor class Engines(Enum): POCKET_SPHINX = 'PocketSphinx' PORCUPINE = 'Porcupine' SNOWBOY", "= os.path.join(os.path.dirname(__file__), 'engines/snowboy/resources/common.res').encode() self._snowboy = snowboydetect.SnowboyDetect(resource_filename=resource_filename, model_str=model_str) # https://github.com/Kitt-AI/snowboy#pretrained-universal-models if", "= keyword.lower() if keyword == 'alexa': model_relative_path = 'engines/snowboy/resources/alexa/alexa-avs-sample-app/alexa.umdl' else:", "else: model_relative_path = 'engines/snowboy/resources/models/%s.umdl' % keyword.replace(' ', '_') model_str =", "= Porcupine( library_path=os.path.join(self._repo_path, 'lib/linux/x86_64/libpv_porcupine.so'), model_path=os.path.join(self._repo_path, 'lib/common/porcupine_params.pv'), keyword_paths=[os.path.join(self._repo_path, 'resources/keyword_files/linux/%s_linux.ppn' % keyword.lower())],", "may obtain a copy of the License at # #", "sensitivity): config = Decoder.default_config() config.set_string('-logfn', '/dev/null') config.set_string('-hmm', os.path.join(get_model_path(), 'en-us')) config.set_string('-dict',", "= os.path.join(os.path.dirname(__file__), model_relative_path).encode() resource_filename = 
os.path.join(os.path.dirname(__file__), 'engines/snowboy/resources/common.res').encode() self._snowboy = snowboydetect.SnowboyDetect(resource_filename=resource_filename,", "def create(engine, keyword, sensitivity): if engine is Engines.POCKET_SPHINX: return PocketSphinxEngine(keyword,", "Decoder(config) self._decoder.start_utt() def process(self, pcm): assert pcm.dtype == np.int16 self._decoder.process_raw(pcm.tobytes(),", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "Inc. # # Licensed under the Apache License, Version 2.0", "np.int16 return self._snowboy.RunDetection(pcm.tobytes()) == 1 def release(self): pass def __str__(self):", "release(self): pass def __str__(self): return 'Snowboy' class NyumayaEngine(Engine): def __init__(self,", "may not use this file except in compliance with the", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "NotImplementedError() @staticmethod def frame_length(engine_type): if engine_type is Engines.NYUMAYA: return 1600", "this file except in compliance with the License. 
# You", "** -sensitivity) self._decoder = Decoder(config) self._decoder.start_utt() def process(self, pcm): assert", "library_path=os.path.join(self._repo_path, 'lib/linux/x86_64/libpv_porcupine.so'), model_path=os.path.join(self._repo_path, 'lib/common/porcupine_params.pv'), keyword_paths=[os.path.join(self._repo_path, 'resources/keyword_files/linux/%s_linux.ppn' % keyword.lower())], sensitivities=[sensitivity]) def", "assert pcm.dtype == np.int16 return self._porcupine.process(pcm) == 0 def release(self):", "'PocketSphinx' PORCUPINE = 'Porcupine' SNOWBOY = 'Snowboy' NYUMAYA = 'Nyumaya'", "sensitivity) else: ValueError(\"cannot create engine of type '%s'\", engine.value) class", "return NyumayaEngine(keyword, sensitivity) else: ValueError(\"cannot create engine of type '%s'\",", "step') class Engine(object): def process(self, pcm): raise NotImplementedError() def release(self):", "is Engines.NYUMAYA: return NyumayaEngine(keyword, sensitivity) else: ValueError(\"cannot create engine of", "def __init__(self, keyword, sensitivity): config = Decoder.default_config() config.set_string('-logfn', '/dev/null') config.set_string('-hmm',", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "SensitivityInfo(0, 1, 0.05) elif engine_type is Engines.NYUMAYA: return SensitivityInfo(0, 1,", "detected def release(self): self._decoder.end_utt() def __str__(self): return 'PocketSphinx' class PorcupineEngine(Engine):", "# # Licensed under the Apache License, Version 2.0 (the", "under the License. # import os from collections import namedtuple", "self._snowboy.ApplyFrontend(True) else: self._snowboy.ApplyFrontend(False) def process(self, pcm): assert pcm.dtype == np.int16", "__str__(self): return 'PocketSphinx' class PorcupineEngine(Engine): def __init__(self, keyword, sensitivity): self._porcupine", "file except in compliance with the License. 
# You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "@property def _repo_path(self): return os.path.join(os.path.dirname(__file__), 'engines/porcupine') class SnowboyEngine(Engine): def __init__(self,", "NotImplementedError() def release(self): raise NotImplementedError() def __str__(self): raise NotImplementedError() @staticmethod", "def process(self, pcm): raise NotImplementedError() def release(self): raise NotImplementedError() def", "pocketsphinx.pocketsphinx import Decoder from engines import Porcupine from engines import", "= 'Nyumaya' SensitivityInfo = namedtuple('SensitivityInfo', 'min, max, step') class Engine(object):", "= Decoder(config) self._decoder.start_utt() def process(self, pcm): assert pcm.dtype == np.int16", "self._decoder.end_utt() self._decoder.start_utt() return detected def release(self): self._decoder.end_utt() def __str__(self): return", "engine.value) class PocketSphinxEngine(Engine): def __init__(self, keyword, sensitivity): config = Decoder.default_config()", "def __init__(self, keyword, sensitivity): keyword = keyword.lower() if keyword ==", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "pocketsphinx import get_model_path from pocketsphinx.pocketsphinx import Decoder from engines import", "is Engines.NYUMAYA: return 1600 else: return 512 @staticmethod def sensitivity_info(engine_type):", "pass def __str__(self): return 'Snowboy' class NyumayaEngine(Engine): def __init__(self, keyword,", "elif engine_type is Engines.NYUMAYA: return SensitivityInfo(0, 1, 0.1) else: raise", "'engines/porcupine') class SnowboyEngine(Engine): def __init__(self, keyword, sensitivity): keyword = keyword.lower()", "snowboydetect.SnowboyDetect(resource_filename=resource_filename, model_str=model_str) # https://github.com/Kitt-AI/snowboy#pretrained-universal-models if keyword == 'jarvis': self._snowboy.SetSensitivity(('%f,%f' %", "config.set_string('-keyphrase', keyword if keyword != 'snowboy' else 'snow boy') 
config.set_float('-kws_threshold',", "def process(self, pcm): assert pcm.dtype == np.int16 return self._snowboy.RunDetection(pcm.tobytes()) ==", "'engines/nyumaya_audio_recognition/models/Hotword/%s_v1.0.0.premium' % keyword model_str = os.path.join(os.path.dirname(__file__), model_relative_path) libpath=\"engines/nyumaya_audio_recognition/lib/linux_x86_64/libnyumaya_premium.so.1.0.0\" self._extractor =", "NyumayaEngine(keyword, sensitivity) else: ValueError(\"cannot create engine of type '%s'\", engine.value)", "@staticmethod def create(engine, keyword, sensitivity): if engine is Engines.POCKET_SPHINX: return", "numpy as np from pocketsphinx import get_model_path from pocketsphinx.pocketsphinx import", "assert pcm.dtype == np.int16 return self._snowboy.RunDetection(pcm.tobytes()) == 1 def release(self):", "def release(self): pass def __str__(self): return 'Snowboy' class NyumayaEngine(Engine): def", "engine_type is Engines.SNOWBOY: return SensitivityInfo(0, 1, 0.05) elif engine_type is", "keyword.lower())], sensitivities=[sensitivity]) def process(self, pcm): assert pcm.dtype == np.int16 return", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "return 1600 else: return 512 @staticmethod def sensitivity_info(engine_type): if engine_type", "snowboydetect from engines import AudioRecognition, FeatureExtractor class Engines(Enum): POCKET_SPHINX =", "permissions and # limitations under the License. # import os", "return self._porcupine.process(pcm) == 0 def release(self): self._porcupine.delete() def __str__(self): return", "or implied. 
# See the License for the specific language", "glass'}: self._snowboy.ApplyFrontend(True) else: self._snowboy.ApplyFrontend(False) def process(self, pcm): assert pcm.dtype ==", "pcm.dtype == np.int16 self._decoder.process_raw(pcm.tobytes(), False, False) detected = self._decoder.hyp() if", "pcm): assert pcm.dtype == np.int16 self._decoder.process_raw(pcm.tobytes(), False, False) detected =", "model_str = os.path.join(os.path.dirname(__file__), model_relative_path).encode() resource_filename = os.path.join(os.path.dirname(__file__), 'engines/snowboy/resources/common.res').encode() self._snowboy =", "pcm.dtype == np.int16 #logging.info(len(pcm)) features = self._extractor.signalToMel(pcm.tobytes(),1.0) return self._detector.runDetection(features) ==", "return PocketSphinxEngine(keyword, sensitivity) elif engine is Engines.PORCUPINE: return PorcupineEngine(keyword, sensitivity)", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "sensitivity) elif engine is Engines.SNOWBOY: return SnowboyEngine(keyword, sensitivity) elif engine", "'snowboy' else 'snow boy') config.set_float('-kws_threshold', 10 ** -sensitivity) self._decoder =", "__init__(self, keyword, sensitivity): keyword = keyword.lower() if keyword == 'alexa':", "keyword != 'snowboy' else 'snow boy') config.set_float('-kws_threshold', 10 ** -sensitivity)", "return self._detector.runDetection(features) == 1 def release(self): pass def __str__(self): return", "from enum import Enum import numpy as np from pocketsphinx", "raise NotImplementedError() def __str__(self): raise NotImplementedError() @staticmethod def frame_length(engine_type): if", "PocketSphinxEngine(keyword, sensitivity) elif engine is Engines.PORCUPINE: return PorcupineEngine(keyword, sensitivity) elif", "import os from collections import namedtuple from enum import Enum", "engine is Engines.NYUMAYA: return NyumayaEngine(keyword, sensitivity) else: 
ValueError(\"cannot create engine", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "'computer', 'jarvis', 'view glass'}: self._snowboy.ApplyFrontend(True) else: self._snowboy.ApplyFrontend(False) def process(self, pcm):", "Engines.NYUMAYA: return NyumayaEngine(keyword, sensitivity) else: ValueError(\"cannot create engine of type", "engine of type '%s'\", engine.value) class PocketSphinxEngine(Engine): def __init__(self, keyword,", "ValueError(\"no sensitivity range for '%s'\", engine_type.value) @staticmethod def create(engine, keyword,", "self._detector = AudioRecognition(libpath) keywordId = self._detector.addModel(model_str,sensitivity) def process(self, pcm): assert", "(the \"License\"); # you may not use this file except", "Engines.NYUMAYA: return SensitivityInfo(0, 1, 0.1) else: raise ValueError(\"no sensitivity range", "model_str=model_str) # https://github.com/Kitt-AI/snowboy#pretrained-universal-models if keyword == 'jarvis': self._snowboy.SetSensitivity(('%f,%f' % (sensitivity,", "FeatureExtractor(libpath) self._detector = AudioRecognition(libpath) keywordId = self._detector.addModel(model_str,sensitivity) def process(self, pcm):", "# you may not use this file except in compliance", "__str__(self): return 'Porcupine' @property def _repo_path(self): return os.path.join(os.path.dirname(__file__), 'engines/porcupine') class", "elif engine is Engines.NYUMAYA: return NyumayaEngine(keyword, sensitivity) else: ValueError(\"cannot create", "config.set_string('-logfn', '/dev/null') config.set_string('-hmm', os.path.join(get_model_path(), 'en-us')) config.set_string('-dict', os.path.join(get_model_path(), 'cmudict-en-us.dict')) config.set_string('-keyphrase', keyword", "class SnowboyEngine(Engine): def __init__(self, keyword, sensitivity): keyword = keyword.lower() if", "release(self): self._porcupine.delete() def __str__(self): return 'Porcupine' @property def _repo_path(self): return", "release(self): raise 
NotImplementedError() def __str__(self): raise NotImplementedError() @staticmethod def frame_length(engine_type):", "== np.int16 self._decoder.process_raw(pcm.tobytes(), False, False) detected = self._decoder.hyp() if detected:", "import get_model_path from pocketsphinx.pocketsphinx import Decoder from engines import Porcupine", "pcm): assert pcm.dtype == np.int16 return self._porcupine.process(pcm) == 0 def", "SNOWBOY = 'Snowboy' NYUMAYA = 'Nyumaya' SensitivityInfo = namedtuple('SensitivityInfo', 'min,", "NyumayaEngine(Engine): def __init__(self, keyword, sensitivity): #logging.info(\"INIT NYUMAYA\") keyword = keyword.lower()", "# # Unless required by applicable law or agreed to", "= FeatureExtractor(libpath) self._detector = AudioRecognition(libpath) keywordId = self._detector.addModel(model_str,sensitivity) def process(self,", "import Porcupine from engines import snowboydetect from engines import AudioRecognition,", "engine is Engines.POCKET_SPHINX: return PocketSphinxEngine(keyword, sensitivity) elif engine is Engines.PORCUPINE:", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "from pocketsphinx.pocketsphinx import Decoder from engines import Porcupine from engines", "Version 2.0 (the \"License\"); # you may not use this", "__init__(self, keyword, sensitivity): #logging.info(\"INIT NYUMAYA\") keyword = keyword.lower() model_relative_path =", "is Engines.SNOWBOY: return SensitivityInfo(0, 1, 0.05) elif engine_type is Engines.NYUMAYA:", "engines import snowboydetect from engines import AudioRecognition, FeatureExtractor class Engines(Enum):", "keyword = keyword.lower() model_relative_path = 'engines/nyumaya_audio_recognition/models/Hotword/%s_v1.0.0.premium' % keyword model_str =", "if keyword != 'snowboy' else 'snow boy') config.set_float('-kws_threshold', 10 **", "sensitivity): self._porcupine = Porcupine( library_path=os.path.join(self._repo_path, 'lib/linux/x86_64/libpv_porcupine.so'), 
model_path=os.path.join(self._repo_path, 'lib/common/porcupine_params.pv'), keyword_paths=[os.path.join(self._repo_path, 'resources/keyword_files/linux/%s_linux.ppn'", "= AudioRecognition(libpath) keywordId = self._detector.addModel(model_str,sensitivity) def process(self, pcm): assert pcm.dtype", "== 'alexa': model_relative_path = 'engines/snowboy/resources/alexa/alexa-avs-sample-app/alexa.umdl' else: model_relative_path = 'engines/snowboy/resources/models/%s.umdl' %", "self._detector.runDetection(features) == 1 def release(self): pass def __str__(self): return 'Nyumaya'", "implied. # See the License for the specific language governing", "sensitivity): if engine is Engines.POCKET_SPHINX: return PocketSphinxEngine(keyword, sensitivity) elif engine", "Engines.NYUMAYA: return 1600 else: return 512 @staticmethod def sensitivity_info(engine_type): if", "under the Apache License, Version 2.0 (the \"License\"); # you", "process(self, pcm): assert pcm.dtype == np.int16 #logging.info(len(pcm)) features = self._extractor.signalToMel(pcm.tobytes(),1.0)", "self._detector.addModel(model_str,sensitivity) def process(self, pcm): assert pcm.dtype == np.int16 #logging.info(len(pcm)) features", "os.path.join(os.path.dirname(__file__), model_relative_path) libpath=\"engines/nyumaya_audio_recognition/lib/linux_x86_64/libnyumaya_premium.so.1.0.0\" self._extractor = FeatureExtractor(libpath) self._detector = AudioRecognition(libpath) keywordId", "% (sensitivity, sensitivity)).encode()) else: self._snowboy.SetSensitivity(str(sensitivity).encode()) if keyword in {'alexa', 'computer',", "self._extractor = FeatureExtractor(libpath) self._detector = AudioRecognition(libpath) keywordId = self._detector.addModel(model_str,sensitivity) def", "PocketSphinxEngine(Engine): def __init__(self, keyword, sensitivity): config = Decoder.default_config() config.set_string('-logfn', '/dev/null')", "by applicable law or agreed to in writing, software #", "'resources/keyword_files/linux/%s_linux.ppn' % 
keyword.lower())], sensitivities=[sensitivity]) def process(self, pcm): assert pcm.dtype ==", "import namedtuple from enum import Enum import numpy as np", "https://github.com/Kitt-AI/snowboy#pretrained-universal-models if keyword == 'jarvis': self._snowboy.SetSensitivity(('%f,%f' % (sensitivity, sensitivity)).encode()) else:", "assert pcm.dtype == np.int16 #logging.info(len(pcm)) features = self._extractor.signalToMel(pcm.tobytes(),1.0) return self._detector.runDetection(features)", "def release(self): raise NotImplementedError() def __str__(self): raise NotImplementedError() @staticmethod def", "Engines.SNOWBOY: return SensitivityInfo(0, 1, 0.05) elif engine_type is Engines.NYUMAYA: return", "0.05) elif engine_type is Engines.NYUMAYA: return SensitivityInfo(0, 1, 0.1) else:", "keyword == 'jarvis': self._snowboy.SetSensitivity(('%f,%f' % (sensitivity, sensitivity)).encode()) else: self._snowboy.SetSensitivity(str(sensitivity).encode()) if", "sensitivity)).encode()) else: self._snowboy.SetSensitivity(str(sensitivity).encode()) if keyword in {'alexa', 'computer', 'jarvis', 'view", "detected = self._decoder.hyp() if detected: self._decoder.end_utt() self._decoder.start_utt() return detected def", "return PorcupineEngine(keyword, sensitivity) elif engine is Engines.SNOWBOY: return SnowboyEngine(keyword, sensitivity)", "'en-us')) config.set_string('-dict', os.path.join(get_model_path(), 'cmudict-en-us.dict')) config.set_string('-keyphrase', keyword if keyword != 'snowboy'", "Engines.SNOWBOY: return SnowboyEngine(keyword, sensitivity) elif engine is Engines.NYUMAYA: return NyumayaEngine(keyword,", "keyword, sensitivity): #logging.info(\"INIT NYUMAYA\") keyword = keyword.lower() model_relative_path = 'engines/nyumaya_audio_recognition/models/Hotword/%s_v1.0.0.premium'", "SnowboyEngine(keyword, sensitivity) elif engine is Engines.NYUMAYA: return NyumayaEngine(keyword, sensitivity) else:", "'/dev/null') config.set_string('-hmm', os.path.join(get_model_path(), 'en-us')) 
config.set_string('-dict', os.path.join(get_model_path(), 'cmudict-en-us.dict')) config.set_string('-keyphrase', keyword if", "def __init__(self, keyword, sensitivity): self._porcupine = Porcupine( library_path=os.path.join(self._repo_path, 'lib/linux/x86_64/libpv_porcupine.so'), model_path=os.path.join(self._repo_path,", "(sensitivity, sensitivity)).encode()) else: self._snowboy.SetSensitivity(str(sensitivity).encode()) if keyword in {'alexa', 'computer', 'jarvis',", "def __str__(self): return 'PocketSphinx' class PorcupineEngine(Engine): def __init__(self, keyword, sensitivity):", "= 'PocketSphinx' PORCUPINE = 'Porcupine' SNOWBOY = 'Snowboy' NYUMAYA =", "'view glass'}: self._snowboy.ApplyFrontend(True) else: self._snowboy.ApplyFrontend(False) def process(self, pcm): assert pcm.dtype", "% keyword.lower())], sensitivities=[sensitivity]) def process(self, pcm): assert pcm.dtype == np.int16", "keyword if keyword != 'snowboy' else 'snow boy') config.set_float('-kws_threshold', 10", "# https://github.com/Kitt-AI/snowboy#pretrained-universal-models if keyword == 'jarvis': self._snowboy.SetSensitivity(('%f,%f' % (sensitivity, sensitivity)).encode())", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "return SensitivityInfo(-21, 15, 3) elif engine_type is Engines.PORCUPINE: return SensitivityInfo(0,", "import Enum import numpy as np from pocketsphinx import get_model_path", "Unless required by applicable law or agreed to in writing,", "from engines import Porcupine from engines import snowboydetect from engines", "'PocketSphinx' class PorcupineEngine(Engine): def __init__(self, keyword, sensitivity): self._porcupine = Porcupine(", "class Engines(Enum): POCKET_SPHINX = 'PocketSphinx' PORCUPINE = 'Porcupine' SNOWBOY =", "SensitivityInfo(0, 1, 0.1) else: raise ValueError(\"no sensitivity range for '%s'\",", "= namedtuple('SensitivityInfo', 'min, max, step') class Engine(object): def process(self, pcm):", "'jarvis', 'view glass'}: self._snowboy.ApplyFrontend(True) 
else: self._snowboy.ApplyFrontend(False) def process(self, pcm): assert", "self._decoder.hyp() if detected: self._decoder.end_utt() self._decoder.start_utt() return detected def release(self): self._decoder.end_utt()", "the specific language governing permissions and # limitations under the", "from engines import snowboydetect from engines import AudioRecognition, FeatureExtractor class", "applicable law or agreed to in writing, software # distributed", "os.path.join(os.path.dirname(__file__), 'engines/snowboy/resources/common.res').encode() self._snowboy = snowboydetect.SnowboyDetect(resource_filename=resource_filename, model_str=model_str) # https://github.com/Kitt-AI/snowboy#pretrained-universal-models if keyword", "0 def release(self): self._porcupine.delete() def __str__(self): return 'Porcupine' @property def", "def frame_length(engine_type): if engine_type is Engines.NYUMAYA: return 1600 else: return", "self._snowboy.SetSensitivity(str(sensitivity).encode()) if keyword in {'alexa', 'computer', 'jarvis', 'view glass'}: self._snowboy.ApplyFrontend(True)", "keyword.replace(' ', '_') model_str = os.path.join(os.path.dirname(__file__), model_relative_path).encode() resource_filename = os.path.join(os.path.dirname(__file__),", "in writing, software # distributed under the License is distributed", "self._snowboy.SetSensitivity(('%f,%f' % (sensitivity, sensitivity)).encode()) else: self._snowboy.SetSensitivity(str(sensitivity).encode()) if keyword in {'alexa',", "return os.path.join(os.path.dirname(__file__), 'engines/porcupine') class SnowboyEngine(Engine): def __init__(self, keyword, sensitivity): keyword", "= 'engines/nyumaya_audio_recognition/models/Hotword/%s_v1.0.0.premium' % keyword model_str = os.path.join(os.path.dirname(__file__), model_relative_path) libpath=\"engines/nyumaya_audio_recognition/lib/linux_x86_64/libnyumaya_premium.so.1.0.0\" self._extractor", "model_str = os.path.join(os.path.dirname(__file__), model_relative_path) 
libpath=\"engines/nyumaya_audio_recognition/lib/linux_x86_64/libnyumaya_premium.so.1.0.0\" self._extractor = FeatureExtractor(libpath) self._detector =", "namedtuple('SensitivityInfo', 'min, max, step') class Engine(object): def process(self, pcm): raise", "'_') model_str = os.path.join(os.path.dirname(__file__), model_relative_path).encode() resource_filename = os.path.join(os.path.dirname(__file__), 'engines/snowboy/resources/common.res').encode() self._snowboy", "Enum import numpy as np from pocketsphinx import get_model_path from", "Engines.PORCUPINE: return PorcupineEngine(keyword, sensitivity) elif engine is Engines.SNOWBOY: return SnowboyEngine(keyword,", "self._decoder.process_raw(pcm.tobytes(), False, False) detected = self._decoder.hyp() if detected: self._decoder.end_utt() self._decoder.start_utt()", "PORCUPINE = 'Porcupine' SNOWBOY = 'Snowboy' NYUMAYA = 'Nyumaya' SensitivityInfo", "'Snowboy' NYUMAYA = 'Nyumaya' SensitivityInfo = namedtuple('SensitivityInfo', 'min, max, step')", "= keyword.lower() model_relative_path = 'engines/nyumaya_audio_recognition/models/Hotword/%s_v1.0.0.premium' % keyword model_str = os.path.join(os.path.dirname(__file__),", "if detected: self._decoder.end_utt() self._decoder.start_utt() return detected def release(self): self._decoder.end_utt() def", "SensitivityInfo(0, 1, 0.1) elif engine_type is Engines.SNOWBOY: return SensitivityInfo(0, 1,", "config.set_string('-dict', os.path.join(get_model_path(), 'cmudict-en-us.dict')) config.set_string('-keyphrase', keyword if keyword != 'snowboy' else", "'min, max, step') class Engine(object): def process(self, pcm): raise NotImplementedError()", "= 'Porcupine' SNOWBOY = 'Snowboy' NYUMAYA = 'Nyumaya' SensitivityInfo =", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "# You may obtain a copy of the License at", "'lib/common/porcupine_params.pv'), keyword_paths=[os.path.join(self._repo_path, 
'resources/keyword_files/linux/%s_linux.ppn' % keyword.lower())], sensitivities=[sensitivity]) def process(self, pcm): assert", "'%s'\", engine.value) class PocketSphinxEngine(Engine): def __init__(self, keyword, sensitivity): config =", "== 'jarvis': self._snowboy.SetSensitivity(('%f,%f' % (sensitivity, sensitivity)).encode()) else: self._snowboy.SetSensitivity(str(sensitivity).encode()) if keyword", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "'Snowboy' class NyumayaEngine(Engine): def __init__(self, keyword, sensitivity): #logging.info(\"INIT NYUMAYA\") keyword", "def process(self, pcm): assert pcm.dtype == np.int16 self._decoder.process_raw(pcm.tobytes(), False, False)", "== np.int16 #logging.info(len(pcm)) features = self._extractor.signalToMel(pcm.tobytes(),1.0) return self._detector.runDetection(features) == 1", "!= 'snowboy' else 'snow boy') config.set_float('-kws_threshold', 10 ** -sensitivity) self._decoder", "self._porcupine.delete() def __str__(self): return 'Porcupine' @property def _repo_path(self): return os.path.join(os.path.dirname(__file__),", "pcm): assert pcm.dtype == np.int16 return self._snowboy.RunDetection(pcm.tobytes()) == 1 def", "self._decoder.start_utt() return detected def release(self): self._decoder.end_utt() def __str__(self): return 'PocketSphinx'", "__init__(self, keyword, sensitivity): config = Decoder.default_config() config.set_string('-logfn', '/dev/null') config.set_string('-hmm', os.path.join(get_model_path(),", "process(self, pcm): assert pcm.dtype == np.int16 return self._snowboy.RunDetection(pcm.tobytes()) == 1", "1 def release(self): pass def __str__(self): return 'Snowboy' class NyumayaEngine(Engine):", "the License for the specific language governing permissions and #", "'jarvis': self._snowboy.SetSensitivity(('%f,%f' % (sensitivity, sensitivity)).encode()) else: self._snowboy.SetSensitivity(str(sensitivity).encode()) if keyword in", "Apache License, Version 2.0 (the \"License\"); # you 
may not", "either express or implied. # See the License for the", "NotImplementedError() def __str__(self): raise NotImplementedError() @staticmethod def frame_length(engine_type): if engine_type", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "keywordId = self._detector.addModel(model_str,sensitivity) def process(self, pcm): assert pcm.dtype == np.int16", "keyword model_str = os.path.join(os.path.dirname(__file__), model_relative_path) libpath=\"engines/nyumaya_audio_recognition/lib/linux_x86_64/libnyumaya_premium.so.1.0.0\" self._extractor = FeatureExtractor(libpath) self._detector", "elif engine_type is Engines.SNOWBOY: return SensitivityInfo(0, 1, 0.05) elif engine_type", "return 'PocketSphinx' class PorcupineEngine(Engine): def __init__(self, keyword, sensitivity): self._porcupine =", "AudioRecognition(libpath) keywordId = self._detector.addModel(model_str,sensitivity) def process(self, pcm): assert pcm.dtype ==", "class PocketSphinxEngine(Engine): def __init__(self, keyword, sensitivity): config = Decoder.default_config() config.set_string('-logfn',", "return 512 @staticmethod def sensitivity_info(engine_type): if engine_type is Engines.POCKET_SPHINX: return", "engines import AudioRecognition, FeatureExtractor class Engines(Enum): POCKET_SPHINX = 'PocketSphinx' PORCUPINE", "os.path.join(os.path.dirname(__file__), model_relative_path).encode() resource_filename = os.path.join(os.path.dirname(__file__), 'engines/snowboy/resources/common.res').encode() self._snowboy = snowboydetect.SnowboyDetect(resource_filename=resource_filename, model_str=model_str)", "# Copyright 2018 Picovoice Inc. 
# # Licensed under the", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "'Nyumaya' SensitivityInfo = namedtuple('SensitivityInfo', 'min, max, step') class Engine(object): def", "if engine is Engines.POCKET_SPHINX: return PocketSphinxEngine(keyword, sensitivity) elif engine is", "def _repo_path(self): return os.path.join(os.path.dirname(__file__), 'engines/porcupine') class SnowboyEngine(Engine): def __init__(self, keyword,", "= os.path.join(os.path.dirname(__file__), model_relative_path) libpath=\"engines/nyumaya_audio_recognition/lib/linux_x86_64/libnyumaya_premium.so.1.0.0\" self._extractor = FeatureExtractor(libpath) self._detector = AudioRecognition(libpath)", "keyword.lower() if keyword == 'alexa': model_relative_path = 'engines/snowboy/resources/alexa/alexa-avs-sample-app/alexa.umdl' else: model_relative_path", "self._porcupine = Porcupine( library_path=os.path.join(self._repo_path, 'lib/linux/x86_64/libpv_porcupine.so'), model_path=os.path.join(self._repo_path, 'lib/common/porcupine_params.pv'), keyword_paths=[os.path.join(self._repo_path, 'resources/keyword_files/linux/%s_linux.ppn' %", "os from collections import namedtuple from enum import Enum import", "% keyword.replace(' ', '_') model_str = os.path.join(os.path.dirname(__file__), model_relative_path).encode() resource_filename =", "'engines/snowboy/resources/models/%s.umdl' % keyword.replace(' ', '_') model_str = os.path.join(os.path.dirname(__file__), model_relative_path).encode() resource_filename", "\"License\"); # you may not use this file except in", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "def __str__(self): return 'Porcupine' @property def _repo_path(self): return os.path.join(os.path.dirname(__file__), 'engines/porcupine')", "get_model_path from pocketsphinx.pocketsphinx import Decoder from engines import Porcupine from", "def __init__(self, keyword, sensitivity): #logging.info(\"INIT NYUMAYA\") keyword = keyword.lower() model_relative_path", 
"# distributed under the License is distributed on an \"AS", "2018 Picovoice Inc. # # Licensed under the Apache License,", "if engine_type is Engines.POCKET_SPHINX: return SensitivityInfo(-21, 15, 3) elif engine_type", "detected: self._decoder.end_utt() self._decoder.start_utt() return detected def release(self): self._decoder.end_utt() def __str__(self):", "# Unless required by applicable law or agreed to in", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "POCKET_SPHINX = 'PocketSphinx' PORCUPINE = 'Porcupine' SNOWBOY = 'Snowboy' NYUMAYA", "engine_type is Engines.NYUMAYA: return 1600 else: return 512 @staticmethod def", "NYUMAYA\") keyword = keyword.lower() model_relative_path = 'engines/nyumaya_audio_recognition/models/Hotword/%s_v1.0.0.premium' % keyword model_str", "model_relative_path = 'engines/snowboy/resources/alexa/alexa-avs-sample-app/alexa.umdl' else: model_relative_path = 'engines/snowboy/resources/models/%s.umdl' % keyword.replace(' ',", "'Porcupine' SNOWBOY = 'Snowboy' NYUMAYA = 'Nyumaya' SensitivityInfo = namedtuple('SensitivityInfo',", "You may obtain a copy of the License at #", "in {'alexa', 'computer', 'jarvis', 'view glass'}: self._snowboy.ApplyFrontend(True) else: self._snowboy.ApplyFrontend(False) def", "def process(self, pcm): assert pcm.dtype == np.int16 #logging.info(len(pcm)) features =", "return SnowboyEngine(keyword, sensitivity) elif engine is Engines.NYUMAYA: return NyumayaEngine(keyword, sensitivity)", "NYUMAYA = 'Nyumaya' SensitivityInfo = namedtuple('SensitivityInfo', 'min, max, step') class", "assert pcm.dtype == np.int16 self._decoder.process_raw(pcm.tobytes(), False, False) detected = self._decoder.hyp()", "and # limitations under the License. # import os from", "#logging.info(\"INIT NYUMAYA\") keyword = keyword.lower() model_relative_path = 'engines/nyumaya_audio_recognition/models/Hotword/%s_v1.0.0.premium' % keyword", "the Apache License, Version 2.0 (the \"License\"); # you may", "Copyright 2018 Picovoice Inc. 
# # Licensed under the Apache", "return 'Porcupine' @property def _repo_path(self): return os.path.join(os.path.dirname(__file__), 'engines/porcupine') class SnowboyEngine(Engine):", "raise ValueError(\"no sensitivity range for '%s'\", engine_type.value) @staticmethod def create(engine,", "def sensitivity_info(engine_type): if engine_type is Engines.POCKET_SPHINX: return SensitivityInfo(-21, 15, 3)" ]
[ "it should work with python 2, but I haven't tested", "# there is a difference btw. cls and self, but", "print(\"bye\") if __name__ == '__main__': # test if called as", "or related to getting rid of the visa-handle within thvisa", "be needed, or related to getting rid of the visa-handle", "exception, but hey, maybe it will be needed, or related", "= [] def __new__(cls): # there is a difference btw.", "self, but i don't understand self = super().__new__(cls) InsaneClass._alive.append(self) return", "print(\"enter says hello\") return self def __init__(self): pass def __exit__(self,", "tb):# \"with\" context exit: call del print(\"bye\") if __name__ ==", "i don't understand self = super().__new__(cls) InsaneClass._alive.append(self) return weakref.proxy(self) def", "not work in the with-context \"\"\" # NOTE: This is", "self def __init__(self): pass def __exit__(self, exc_type, exc_value, tb):# \"with\"", "@author: https://stackoverflow.com/questions/293431/python-object-deleting-itself @editor: thirschbuechler this is probably overkill to alternatively", "with-context \"\"\" # NOTE: This is Python 3 code, it", "as library instance = InsaneClass() instance.__enter__() instance.commit_suicide() #print(instance) print(InsaneClass) #", "cls and self, but i don't understand self = super().__new__(cls)", "def __new__(cls): # there is a difference btw. cls and", "of the visa-handle within thvisa # for some reason, __enter__", "btw. 
cls and self, but i don't understand self =", "InsaneClass._alive.append(self) return weakref.proxy(self) def commit_suicide(self): self._alive.remove(self) def __enter__(self): print(\"enter says", "# pointer print(InsaneClass().__enter__()) # an object print(\"now, something completely different!\")", "# -*- coding: utf-8 -*- \"\"\" Created on Mon Jan", "if called as executable, not as library instance = InsaneClass()", "weakref #https://docs.python.org/3/library/weakref.html class InsaneClass(object): _alive = [] def __new__(cls): #", "print(InsaneClass().__enter__()) # an object print(\"now, something completely different!\") with InsaneClass()", "the with-context \"\"\" # NOTE: This is Python 3 code,", "and self, but i don't understand self = super().__new__(cls) InsaneClass._alive.append(self)", "__enter__ does not work in the with-context \"\"\" # NOTE:", "getting rid of the visa-handle within thvisa # for some", "should work with python 2, but I haven't tested it.", "by exception, but hey, maybe it will be needed, or", "exc_value, tb):# \"with\" context exit: call del print(\"bye\") if __name__", "\"\"\" # NOTE: This is Python 3 code, it should", "\"with\" context exit: call del print(\"bye\") if __name__ == '__main__':", "== '__main__': # test if called as executable, not as", "is Python 3 code, it should work with python 2,", "rather than by exception, but hey, maybe it will be", "it. 
import weakref #https://docs.python.org/3/library/weakref.html class InsaneClass(object): _alive = [] def", "called as executable, not as library instance = InsaneClass() instance.__enter__()", "# NOTE: This is Python 3 code, it should work", "test if called as executable, not as library instance =", "'__main__': # test if called as executable, not as library", "# for some reason, __enter__ does not work in the", "this is probably overkill to alternatively exit a with-context, rather", "does not work in the with-context \"\"\" # NOTE: This", "utf-8 -*- \"\"\" Created on Mon Jan 20 22:18:58 2020", "haven't tested it. import weakref #https://docs.python.org/3/library/weakref.html class InsaneClass(object): _alive =", "coding: utf-8 -*- \"\"\" Created on Mon Jan 20 22:18:58", "the visa-handle within thvisa # for some reason, __enter__ does", "Mon Jan 20 22:18:58 2020 @author: https://stackoverflow.com/questions/293431/python-object-deleting-itself @editor: thirschbuechler this", "NOTE: This is Python 3 code, it should work with", "call del print(\"bye\") if __name__ == '__main__': # test if", "class InsaneClass(object): _alive = [] def __new__(cls): # there is", "but I haven't tested it. 
import weakref #https://docs.python.org/3/library/weakref.html class InsaneClass(object):", "it will be needed, or related to getting rid of", "python3 # -*- coding: utf-8 -*- \"\"\" Created on Mon", "some reason, __enter__ does not work in the with-context \"\"\"", "than by exception, but hey, maybe it will be needed,", "# test if called as executable, not as library instance", "instance.__enter__() instance.commit_suicide() #print(instance) print(InsaneClass) # pointer print(InsaneClass().__enter__()) # an object", "#https://docs.python.org/3/library/weakref.html class InsaneClass(object): _alive = [] def __new__(cls): # there", "InsaneClass() instance.__enter__() instance.commit_suicide() #print(instance) print(InsaneClass) # pointer print(InsaneClass().__enter__()) # an", "return self def __init__(self): pass def __exit__(self, exc_type, exc_value, tb):#", "probably overkill to alternatively exit a with-context, rather than by", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\" Created on", "an object print(\"now, something completely different!\") with InsaneClass() as i:", "InsaneClass(object): _alive = [] def __new__(cls): # there is a", "weakref.proxy(self) def commit_suicide(self): self._alive.remove(self) def __enter__(self): print(\"enter says hello\") return", "is a difference btw. 
cls and self, but i don't", "hey, maybe it will be needed, or related to getting", "2020 @author: https://stackoverflow.com/questions/293431/python-object-deleting-itself @editor: thirschbuechler this is probably overkill to", "This is Python 3 code, it should work with python", "not as library instance = InsaneClass() instance.__enter__() instance.commit_suicide() #print(instance) print(InsaneClass)", "for some reason, __enter__ does not work in the with-context", "pointer print(InsaneClass().__enter__()) # an object print(\"now, something completely different!\") with", "-*- coding: utf-8 -*- \"\"\" Created on Mon Jan 20", "self = super().__new__(cls) InsaneClass._alive.append(self) return weakref.proxy(self) def commit_suicide(self): self._alive.remove(self) def", "__enter__(self): print(\"enter says hello\") return self def __init__(self): pass def", "hello\") return self def __init__(self): pass def __exit__(self, exc_type, exc_value,", "= InsaneClass() instance.__enter__() instance.commit_suicide() #print(instance) print(InsaneClass) # pointer print(InsaneClass().__enter__()) #", "__exit__(self, exc_type, exc_value, tb):# \"with\" context exit: call del print(\"bye\")", "I haven't tested it. import weakref #https://docs.python.org/3/library/weakref.html class InsaneClass(object): _alive", "2, but I haven't tested it. import weakref #https://docs.python.org/3/library/weakref.html class", "with python 2, but I haven't tested it. 
import weakref", "return weakref.proxy(self) def commit_suicide(self): self._alive.remove(self) def __enter__(self): print(\"enter says hello\")", "context exit: call del print(\"bye\") if __name__ == '__main__': #", "print(InsaneClass) # pointer print(InsaneClass().__enter__()) # an object print(\"now, something completely", "-*- \"\"\" Created on Mon Jan 20 22:18:58 2020 @author:", "import weakref #https://docs.python.org/3/library/weakref.html class InsaneClass(object): _alive = [] def __new__(cls):", "related to getting rid of the visa-handle within thvisa #", "commit_suicide(self): self._alive.remove(self) def __enter__(self): print(\"enter says hello\") return self def", "difference btw. cls and self, but i don't understand self", "to alternatively exit a with-context, rather than by exception, but", "in the with-context \"\"\" # NOTE: This is Python 3", "rid of the visa-handle within thvisa # for some reason,", "if __name__ == '__main__': # test if called as executable,", "Python 3 code, it should work with python 2, but", "a difference btw. cls and self, but i don't understand", "but hey, maybe it will be needed, or related to", "[] def __new__(cls): # there is a difference btw. cls", "Jan 20 22:18:58 2020 @author: https://stackoverflow.com/questions/293431/python-object-deleting-itself @editor: thirschbuechler this is", "but i don't understand self = super().__new__(cls) InsaneClass._alive.append(self) return weakref.proxy(self)", "@editor: thirschbuechler this is probably overkill to alternatively exit a", "is probably overkill to alternatively exit a with-context, rather than", "tested it. 
import weakref #https://docs.python.org/3/library/weakref.html class InsaneClass(object): _alive = []", "#print(instance) print(InsaneClass) # pointer print(InsaneClass().__enter__()) # an object print(\"now, something", "def __exit__(self, exc_type, exc_value, tb):# \"with\" context exit: call del", "__name__ == '__main__': # test if called as executable, not", "print(\"now, something completely different!\") with InsaneClass() as i: i.commit_suicide() print(i)", "don't understand self = super().__new__(cls) InsaneClass._alive.append(self) return weakref.proxy(self) def commit_suicide(self):", "# an object print(\"now, something completely different!\") with InsaneClass() as", "exc_type, exc_value, tb):# \"with\" context exit: call del print(\"bye\") if", "exit: call del print(\"bye\") if __name__ == '__main__': # test", "def __enter__(self): print(\"enter says hello\") return self def __init__(self): pass", "del print(\"bye\") if __name__ == '__main__': # test if called", "executable, not as library instance = InsaneClass() instance.__enter__() instance.commit_suicide() #print(instance)", "def __init__(self): pass def __exit__(self, exc_type, exc_value, tb):# \"with\" context", "maybe it will be needed, or related to getting rid", "there is a difference btw. 
cls and self, but i", "visa-handle within thvisa # for some reason, __enter__ does not", "self._alive.remove(self) def __enter__(self): print(\"enter says hello\") return self def __init__(self):", "__init__(self): pass def __exit__(self, exc_type, exc_value, tb):# \"with\" context exit:", "instance = InsaneClass() instance.__enter__() instance.commit_suicide() #print(instance) print(InsaneClass) # pointer print(InsaneClass().__enter__())", "22:18:58 2020 @author: https://stackoverflow.com/questions/293431/python-object-deleting-itself @editor: thirschbuechler this is probably overkill", "needed, or related to getting rid of the visa-handle within", "3 code, it should work with python 2, but I", "work in the with-context \"\"\" # NOTE: This is Python", "as executable, not as library instance = InsaneClass() instance.__enter__() instance.commit_suicide()", "_alive = [] def __new__(cls): # there is a difference", "__new__(cls): # there is a difference btw. cls and self,", "thvisa # for some reason, __enter__ does not work in", "pass def __exit__(self, exc_type, exc_value, tb):# \"with\" context exit: call", "overkill to alternatively exit a with-context, rather than by exception,", "understand self = super().__new__(cls) InsaneClass._alive.append(self) return weakref.proxy(self) def commit_suicide(self): self._alive.remove(self)", "= super().__new__(cls) InsaneClass._alive.append(self) return weakref.proxy(self) def commit_suicide(self): self._alive.remove(self) def __enter__(self):", "20 22:18:58 2020 @author: https://stackoverflow.com/questions/293431/python-object-deleting-itself @editor: thirschbuechler this is probably", "python 2, but I haven't tested it. 
import weakref #https://docs.python.org/3/library/weakref.html", "within thvisa # for some reason, __enter__ does not work", "reason, __enter__ does not work in the with-context \"\"\" #", "\"\"\" Created on Mon Jan 20 22:18:58 2020 @author: https://stackoverflow.com/questions/293431/python-object-deleting-itself", "Created on Mon Jan 20 22:18:58 2020 @author: https://stackoverflow.com/questions/293431/python-object-deleting-itself @editor:", "to getting rid of the visa-handle within thvisa # for", "a with-context, rather than by exception, but hey, maybe it", "work with python 2, but I haven't tested it. import", "will be needed, or related to getting rid of the", "says hello\") return self def __init__(self): pass def __exit__(self, exc_type,", "super().__new__(cls) InsaneClass._alive.append(self) return weakref.proxy(self) def commit_suicide(self): self._alive.remove(self) def __enter__(self): print(\"enter", "def commit_suicide(self): self._alive.remove(self) def __enter__(self): print(\"enter says hello\") return self", "library instance = InsaneClass() instance.__enter__() instance.commit_suicide() #print(instance) print(InsaneClass) # pointer", "with-context, rather than by exception, but hey, maybe it will", "instance.commit_suicide() #print(instance) print(InsaneClass) # pointer print(InsaneClass().__enter__()) # an object print(\"now,", "object print(\"now, something completely different!\") with InsaneClass() as i: i.commit_suicide()", "thirschbuechler this is probably overkill to alternatively exit a with-context,", "on Mon Jan 20 22:18:58 2020 @author: https://stackoverflow.com/questions/293431/python-object-deleting-itself @editor: thirschbuechler", "code, it should work with python 2, but I haven't", "alternatively exit a with-context, rather than by exception, but hey,", "https://stackoverflow.com/questions/293431/python-object-deleting-itself @editor: thirschbuechler this is probably overkill to alternatively exit", "exit a with-context, rather 
than by exception, but hey, maybe" ]
[ "defect in defects[:, 0, :]: # Each defect is an", "return (num_fingers, img_draw) def segment_arm(frame: np.ndarray, abs_depth_dev: int = 14)", "RGB color image of the segmented arm region with all", "# find median depth value of center region med_val =", "binary image (mask) of a segmented arm region, where arm=255,", "i in defect[:3]] # draw the hull cv2.line(img_draw, tuple(start), tuple(end),", "others red img_draw = cv2.cvtColor(segment, cv2.COLOR_GRAY2RGB) (num_fingers, img_draw) = detect_num_fingers(contour,", "component small_kernel = 3 frame[height // 2 - small_kernel:height //", "int = 14) -> np.ndarray: \"\"\"Segments arm region This method", "return flooded def find_hull_defects(segment: np.ndarray) -> Tuple[np.ndarray, np.ndarray]: \"\"\"Find hull", "center (21x21 pixel) region of imageheight frame center_half = 10", "convexity defects, possibly no hull found or no # fingers", "segment_arm(frame: np.ndarray, abs_depth_dev: int = 14) -> np.ndarray: \"\"\"Segments arm", "cv2.CHAIN_APPROX_SIMPLE) # find largest area contour max_contour = max(contours, key=cv2.contourArea)", "annotated RGB image \"\"\" # segment arm region segment =", "(num_defects,1,4) for defect in defects[:, 0, :]: # Each defect", "method returns the angle (in radians) between two array-like vectors", "imageheight frame center_half = 10 # half-width of 21 is", "based on a single-channel depth image showing a hand and", "# find the hull of the segmented area, and based", "This method accepts a single-channel depth image of an arm", "find median depth value of center region med_val = np.median(center)", "defect point between two fingers so to get the number", "True) # find convexity hull and defects hull = cv2.convexHull(max_contour,", "center_half = 10 # half-width of 21 is 21/2-1 center", "defects :param img_draw: an RGB color image to be annotated", "recognize(img_gray): \"\"\"Recognizes hand gesture in a single-channel depth image This", "end, far = [contour[i][0] for i in 
defect[:3]] # draw", "255, flags=4 | (255 << 8)) ret, flooded = cv2.threshold(flood,", "region, where arm=255, else=0 \"\"\" height, width = frame.shape #", "height, width = frame.shape # find center (21x21 pixel) region", "the segmented arm region with all relevant defect points and", "(0, 0, 255), -1) # make sure we cap the", "single-channel depth image :returns: binary image (mask) of segmented arm", "segment_arm(img_gray) # find the hull of the segmented area, and", "np.ndarray, defects: np.ndarray, img_draw: np.ndarray, thresh_deg: float = 80.0) ->", "radians between two vectors This method returns the angle (in", "0, :]: # Each defect is an array of four", "draw the hull cv2.line(img_draw, tuple(start), tuple(end), (0, 255, 0), 2)", "def angle_rad(v1, v2): \"\"\"Angle in radians between two vectors This", "far = [contour[i][0] for i in defect[:3]] # draw the", "threshold, defect point belongs to two # extended fingers if", "kernel) # connected component small_kernel = 3 frame[height // 2", "frame center_half = 10 # half-width of 21 is 21/2-1", "increment number of fingers num_fingers += 1 # draw point", "is below a threshold, defect point belongs to two #", "3.0 or later\" def recognize(img_gray): \"\"\"Recognizes hand gesture in a", "= max(contours, key=cv2.contourArea) epsilon = 0.01 * cv2.arcLength(max_contour, True) max_contour", "max(contours, key=cv2.contourArea) epsilon = 0.01 * cv2.arcLength(max_contour, True) max_contour =", "np.ndarray, thresh_deg: float = 80.0) -> Tuple[int, np.ndarray]: \"\"\"Detects the", "cv2.morphologyEx(frame, cv2.MORPH_CLOSE, kernel) # connected component small_kernel = 3 frame[height", "hull of a segmented arm region. 
:param segment: a binary", "of extended fingers based on a single-channel depth image showing", "of the segmented arm region with all relevant defect points", "\"\"\"Convert degrees to radians This method converts an angle in", "an angle in radians e[0,2*np.pi) into degrees e[0,360) \"\"\" return", "return [0, img_draw] # if there is a sufficient amount", "value of center region med_val = np.median(center) # try this", "hull cv2.line(img_draw, tuple(start), tuple(end), (0, 255, 0), 2) # if", "containing an algorithm for hand gesture recognition\"\"\" import numpy as", "// 2 + small_kernel, width // 2 - small_kernel:width //", "= np.zeros((height + 2, width + 2), np.uint8) flood =", "extended if defects is None: return [0, img_draw] # we", "frame.copy() cv2.floodFill(flood, mask, (width // 2, height // 2), 255,", "image (mask) of a segmented arm region, where arm=255, else=0", "2 + small_kernel, width // 2 - small_kernel:width // 2", "to get the number of fingers, # start counting at", "belongs to two # extended fingers if angle_rad(start - far,", "#!/usr/bin/env python # -*- coding: utf-8 -*- \"\"\"A module containing", "that find the # convexity defects (contour, defects) = find_hull_defects(segment)", "than the dot-product-acos method. \"\"\" return np.arctan2(np.linalg.norm(np.cross(v1, v2)), np.dot(v1, v2))", "is an array of four integers. # First three indexes", "- far) < deg2rad(thresh_deg): # increment number of fingers num_fingers", "segmented arm region. It is assumed that the hand is", "Each defect is an array of four integers. 
# First", "on the contours and convexity # defects, then draw defects", "# draw point as green cv2.circle(img_draw, tuple(far), 5, (0, 255,", "fingers return min(5, num_fingers), img_draw def angle_rad(v1, v2): \"\"\"Angle in", "tuple(start), tuple(end), (0, 255, 0), 2) # if angle is", "that the hand is placed in the center of the", "width = frame.shape # find center (21x21 pixel) region of", "frame: single-channel depth image :returns: binary image (mask) of segmented", "// 2 - small_kernel:width // 2 + small_kernel] = 128", "annotated RGB color image \"\"\" # if there are no", "is a sufficient amount of convexity defects, we will find", "defects, we will find a # defect point between two", "// 2, height // 2), 255, flags=4 | (255 <<", "[0, img_draw] # we assume the wrist will generate two", "a contour and convexity defects. It will annotate an RGB", "Tuple __author__ = \"<NAME>\" __license__ = \"GNU GPL 3.0 or", "cv2.approxPolyDP(max_contour, epsilon, True) # find convexity hull and defects hull", "on that find the # convexity defects (contour, defects) =", "additional defect points, there are no # fingers extended if", "where arm=255, else=0 :returns: (max_contour, defects) the largest contour in", "furthest # points respectively # contour is of shape (num_points,1,2)", "+ 2, width + 2), np.uint8) flood = frame.copy() cv2.floodFill(flood,", "center region med_val = np.median(center) # try this instead: frame", "np.uint8) flood = frame.copy() cv2.floodFill(flood, mask, (width // 2, height", "0), 2) # if angle is below a threshold, defect", "width // 2 - small_kernel:width // 2 + small_kernel] =", "2 - small_kernel:width // 2 + small_kernel] = 128 mask", "# points respectively # contour is of shape (num_points,1,2) -", "contour is of shape (num_points,1,2) - 2 for point coordinates", "# contour is of shape (num_points,1,2) - 2 for point", "angle_rad(start - far, end - far) < deg2rad(thresh_deg): # increment", "radians e[0,2*np.pi) into degrees e[0,360) \"\"\" 
return angle_deg / 180.0", "instead: frame = np.where(abs(frame - med_val) <= abs_depth_dev, 128, 0).astype(np.uint8)", "img_draw: an RGB color image to be annotated :returns: (num_fingers,", "belong to fingers green, others red img_draw = cv2.cvtColor(segment, cv2.COLOR_GRAY2RGB)", "<= abs_depth_dev, 128, 0).astype(np.uint8) # morphological kernel = np.ones((3, 3),", "5, (0, 0, 255), -1) # make sure we cap", "area contour max_contour = max(contours, key=cv2.contourArea) epsilon = 0.01 *", "# extended fingers if angle_rad(start - far, end - far)", "np.zeros((height + 2, width + 2), np.uint8) flood = frame.copy()", "the number of fingers, # start counting at 1 num_fingers", "end - far) < deg2rad(thresh_deg): # increment number of fingers", "defect point belongs to two # extended fingers if angle_rad(start", "mask, (width // 2, height // 2), 255, flags=4 |", "cv2.convexityDefects(max_contour, hull) return max_contour, defects def detect_num_fingers(contour: np.ndarray, defects: np.ndarray,", "cv2.COLOR_GRAY2RGB) (num_fingers, img_draw) = detect_num_fingers(contour, defects, img_draw) return (num_fingers, img_draw)", "image and all corresponding defects \"\"\" contours, hierarchy = cv2.findContours(segment,", "# increment number of fingers num_fingers += 1 # draw", "import numpy as np import cv2 from typing import Tuple", "defects is None: return [0, img_draw] # we assume the", "ret, flooded = cv2.threshold(flood, 129, 255, cv2.THRESH_BINARY) return flooded def", "# draw the hull cv2.line(img_draw, tuple(start), tuple(end), (0, 255, 0),", "defects, possibly no hull found or no # fingers extended", "med_val) <= abs_depth_dev, 128, 0).astype(np.uint8) # morphological kernel = np.ones((3,", "two fingers so to get the number of fingers, #", "radians This method converts an angle in radians e[0,2*np.pi) into", "cv2.THRESH_BINARY) return flooded def find_hull_defects(segment: np.ndarray) -> Tuple[np.ndarray, np.ndarray]: \"\"\"Find", "flooded def find_hull_defects(segment: 
np.ndarray) -> Tuple[np.ndarray, np.ndarray]: \"\"\"Find hull defects", "the number of extended fingers based on a contour and", "assume the wrist will generate two convexity defects (one on", "defects (one on each # side), so if there are", "so to get the number of fingers, # start counting", "find largest area contour max_contour = max(contours, key=cv2.contourArea) epsilon =", "-> np.ndarray: \"\"\"Segments arm region This method accepts a single-channel", ":]: # Each defect is an array of four integers.", "will generate two convexity defects (one on each # side),", "there are no convexity defects, possibly no hull found or", "converts an angle in radians e[0,2*np.pi) into degrees e[0,360) \"\"\"", "more accurate for small angles than the dot-product-acos method. \"\"\"", "gesture recognition\"\"\" import numpy as np import cv2 from typing", "returns the angle (in radians) between two array-like vectors using", "# Each defect is an array of four integers. #", "detect_num_fingers(contour: np.ndarray, defects: np.ndarray, img_draw: np.ndarray, thresh_deg: float = 80.0)", "# fingers extended if defects is None: return [0, img_draw]", "convexity defects, we will find a # defect point between", "possibly no hull found or no # fingers extended if", "of imageheight frame center_half = 10 # half-width of 21", "for i in defect[:3]] # draw the hull cv2.line(img_draw, tuple(start),", "arm region. It is assumed that the hand is placed", "# draw point as red cv2.circle(img_draw, tuple(far), 5, (0, 0,", "small angles than the dot-product-acos method. \"\"\" return np.arctan2(np.linalg.norm(np.cross(v1, v2)),", "python # -*- coding: utf-8 -*- \"\"\"A module containing an", "wrist will generate two convexity defects (one on each #", "arm region. 
:param segment: a binary image (mask) of a", "image :returns: (num_fingers, img_draw) The estimated number of extended fingers", "sure we cap the number of fingers return min(5, num_fingers),", ":param img_draw: an RGB color image to be annotated :returns:", "annotated :returns: (num_fingers, img_draw) the estimated number of extended fingers", "extended fingers if angle_rad(start - far, end - far) <", "image This method estimates the number of extended fingers based", "if there is a sufficient amount of convexity defects, we", "cv2.floodFill(flood, mask, (width // 2, height // 2), 255, flags=4", "is of shape (num_points,1,2) - 2 for point coordinates start,", "np.arctan2(np.linalg.norm(np.cross(v1, v2)), np.dot(v1, v2)) def deg2rad(angle_deg): \"\"\"Convert degrees to radians", "np.ndarray]: \"\"\"Find hull defects This method finds all defects in", "defects This method finds all defects in the hull of", "img_draw] # we assume the wrist will generate two convexity", "and extracts the segmented arm region. It is assumed that", "all corresponding defects \"\"\" contours, hierarchy = cv2.findContours(segment, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)", "# if there is a sufficient amount of convexity defects,", "frame = np.where(abs(frame - med_val) <= abs_depth_dev, 128, 0).astype(np.uint8) #", "num_fingers = 1 # Defects are of shape (num_defects,1,4) for", "cap the number of fingers return min(5, num_fingers), img_draw def", "small_kernel] = 128 mask = np.zeros((height + 2, width +", ":returns: (num_fingers, img_draw) the estimated number of extended fingers and", "number of fingers, # start counting at 1 num_fingers =", "of fingers return min(5, num_fingers), img_draw def angle_rad(v1, v2): \"\"\"Angle", "method. 
\"\"\" return np.arctan2(np.linalg.norm(np.cross(v1, v2)), np.dot(v1, v2)) def deg2rad(angle_deg): \"\"\"Convert", "This method converts an angle in radians e[0,2*np.pi) into degrees", "algorithm for hand gesture recognition\"\"\" import numpy as np import", "// 2 - small_kernel:height // 2 + small_kernel, width //", "else=0 :returns: (max_contour, defects) the largest contour in the image", "It is assumed that the hand is placed in the", "method finds all defects in the hull of a segmented", "fingers depending on the contours and convexity # defects, then", "based on a contour and convexity defects. It will annotate", "convexity defects (contour, defects) = find_hull_defects(segment) # detect the number", "a sufficient amount of convexity defects, we will find a", "255, 0), -1) else: # draw point as red cv2.circle(img_draw,", "This method estimates the number of extended fingers based on", ":returns: binary image (mask) of segmented arm region, where arm=255,", "no hull found or no # fingers extended if defects", "flooded = cv2.threshold(flood, 129, 255, cv2.THRESH_BINARY) return flooded def find_hull_defects(segment:", "img_draw] # if there is a sufficient amount of convexity", "find a # defect point between two fingers so to", "of a segmented arm region. 
:param segment: a binary image", "point between two fingers so to get the number of", "there are no # fingers extended if len(defects) <= 2:", "are of shape (num_defects,1,4) for defect in defects[:, 0, :]:", "module containing an algorithm for hand gesture recognition\"\"\" import numpy", "-*- coding: utf-8 -*- \"\"\"A module containing an algorithm for", "# fingers extended if len(defects) <= 2: return [0, img_draw]", "np.ndarray: \"\"\"Segments arm region This method accepts a single-channel depth", "\"<NAME>\" __license__ = \"GNU GPL 3.0 or later\" def recognize(img_gray):", "the number of fingers return min(5, num_fingers), img_draw def angle_rad(v1,", "depth image :returns: (num_fingers, img_draw) The estimated number of extended", "- far, end - far) < deg2rad(thresh_deg): # increment number", "find the hull of the segmented area, and based on", "flood = frame.copy() cv2.floodFill(flood, mask, (width // 2, height //", "below a threshold, defect point belongs to two # extended", ":param defects: a list of convexity defects :param img_draw: an", "extended if len(defects) <= 2: return [0, img_draw] # if", "cv2.convexHull(max_contour, returnPoints=False) defects = cv2.convexityDefects(max_contour, hull) return max_contour, defects def", "<= 2: return [0, img_draw] # if there is a", "epsilon, True) # find convexity hull and defects hull =", "// 2 + center_half, width // 2 - center_half:width //", "2 for point coordinates start, end, far = [contour[i][0] for", "= [contour[i][0] for i in defect[:3]] # draw the hull", "the number of extended fingers based on a single-channel depth", "hull defects This method finds all defects in the hull", "find the # convexity defects (contour, defects) = find_hull_defects(segment) #", "= 3 frame[height // 2 - small_kernel:height // 2 +", "80.0) -> Tuple[int, np.ndarray]: \"\"\"Detects the number of extended fingers", "hand region and extracts the segmented arm region. 
It is", "# if angle is below a threshold, defect point belongs", "a segmented arm region, where arm=255, else=0 :returns: (max_contour, defects)", "2 + center_half, width // 2 - center_half:width // 2", "on a single-channel depth image showing a hand and arm", "segmented arm region, where arm=255, else=0 :returns: (max_contour, defects) the", "points and the hull. :param contours: a list of contours", "of fingers num_fingers += 1 # draw point as green", "are no additional defect points, there are no # fingers", "3), np.uint8) frame = cv2.morphologyEx(frame, cv2.MORPH_CLOSE, kernel) # connected component", "// 2 + center_half] # find median depth value of", "// 2 - center_half:width // 2 + center_half] # find", "\"\"\" # if there are no convexity defects, possibly no", "defects: np.ndarray, img_draw: np.ndarray, thresh_deg: float = 80.0) -> Tuple[int,", "hierarchy = cv2.findContours(segment, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) # find largest area contour", "estimated number of extended fingers and an annotated RGB color", "defect is an array of four integers. # First three", "of extended fingers This method determines the number of extended", "which is more accurate for small angles than the dot-product-acos", "array of four integers. 
# First three indexes of start,", "numpy as np import cv2 from typing import Tuple __author__", "coding: utf-8 -*- \"\"\"A module containing an algorithm for hand", "height // 2), 255, flags=4 | (255 << 8)) ret,", "import cv2 from typing import Tuple __author__ = \"<NAME>\" __license__", "hull) return max_contour, defects def detect_num_fingers(contour: np.ndarray, defects: np.ndarray, img_draw:", "tuple(far), 5, (0, 255, 0), -1) else: # draw point", "if there are no convexity defects, possibly no hull found", "draw defects that belong to fingers green, others red img_draw", "= 1 # Defects are of shape (num_defects,1,4) for defect", "to radians This method converts an angle in radians e[0,2*np.pi)", "arm region, where arm=255, else=0 :returns: (max_contour, defects) the largest", "annotate an RGB color image of the segmented arm region", "defects, img_draw) return (num_fingers, img_draw) def segment_arm(frame: np.ndarray, abs_depth_dev: int", "an RGB color image of the segmented arm region with", "depending on the contours and convexity # defects, then draw", "defects hull = cv2.convexHull(max_contour, returnPoints=False) defects = cv2.convexityDefects(max_contour, hull) return", "defects = cv2.convexityDefects(max_contour, hull) return max_contour, defects def detect_num_fingers(contour: np.ndarray,", "fingers This method determines the number of extended fingers based", "pixel) region of imageheight frame center_half = 10 # half-width", "2: return [0, img_draw] # if there is a sufficient", "# side), so if there are no additional defect points,", "for point coordinates start, end, far = [contour[i][0] for i", "+ small_kernel] = 128 mask = np.zeros((height + 2, width", "between two fingers so to get the number of fingers,", "image. :param frame: single-channel depth image :returns: binary image (mask)", "with all relevant defect points and the hull. 
:param contours:", "estimated number of extended fingers and an annotated RGB image", "there are no additional defect points, there are no #", "region and extracts the segmented arm region. It is assumed", "hand is placed in the center of the image. :param", "a single-channel depth image This method estimates the number of", "cv2.arcLength(max_contour, True) max_contour = cv2.approxPolyDP(max_contour, epsilon, True) # find convexity", "# morphological kernel = np.ones((3, 3), np.uint8) frame = cv2.morphologyEx(frame,", "placed in the center of the image. :param frame: single-channel", "__author__ = \"<NAME>\" __license__ = \"GNU GPL 3.0 or later\"", "(max_contour, defects) the largest contour in the image and all", "the wrist will generate two convexity defects (one on each", "is more accurate for small angles than the dot-product-acos method.", "1 # draw point as green cv2.circle(img_draw, tuple(far), 5, (0,", "segmented arm region. :param segment: a binary image (mask) of", "the segmented area, and based on that find the #", "area, and based on that find the # convexity defects", "| (255 << 8)) ret, flooded = cv2.threshold(flood, 129, 255,", "frame = cv2.morphologyEx(frame, cv2.MORPH_CLOSE, kernel) # connected component small_kernel =", "cv2.circle(img_draw, tuple(far), 5, (0, 0, 255), -1) # make sure", "return min(5, num_fingers), img_draw def angle_rad(v1, v2): \"\"\"Angle in radians", "no # fingers extended if defects is None: return [0,", "True) max_contour = cv2.approxPolyDP(max_contour, epsilon, True) # find convexity hull", "returnPoints=False) defects = cv2.convexityDefects(max_contour, hull) return max_contour, defects def detect_num_fingers(contour:", "0.01 * cv2.arcLength(max_contour, True) max_contour = cv2.approxPolyDP(max_contour, epsilon, True) #", "\"\"\" return np.arctan2(np.linalg.norm(np.cross(v1, v2)), np.dot(v1, v2)) def deg2rad(angle_deg): \"\"\"Convert degrees", "method determines the number of extended fingers based on a", "2), np.uint8) 
flood = frame.copy() cv2.floodFill(flood, mask, (width // 2,", "2 + center_half] # find median depth value of center", "center_half, width // 2 - center_half:width // 2 + center_half]", "largest contour in the image and all corresponding defects \"\"\"", "# we assume the wrist will generate two convexity defects", "= cv2.findContours(segment, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) # find largest area contour max_contour", "a single-channel depth image showing a hand and arm region.", "largest area contour max_contour = max(contours, key=cv2.contourArea) epsilon = 0.01", "arm region This method accepts a single-channel depth image of", "defects, then draw defects that belong to fingers green, others", "np.ndarray, img_draw: np.ndarray, thresh_deg: float = 80.0) -> Tuple[int, np.ndarray]:", "end and the furthest # points respectively # contour is", "the cross-product method, which is more accurate for small angles", "binary image (mask) of segmented arm region, where arm=255, else=0", "cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) # find largest area contour max_contour = max(contours,", "an annotated RGB image \"\"\" # segment arm region segment", "segment arm region segment = segment_arm(img_gray) # find the hull", "(one on each # side), so if there are no", "10 # half-width of 21 is 21/2-1 center = frame[height", "accurate for small angles than the dot-product-acos method. \"\"\" return", "= frame[height // 2 - center_half:height // 2 + center_half,", "four integers. 
# First three indexes of start, end and", "extended fingers and an annotated RGB image \"\"\" # segment", "max_contour = cv2.approxPolyDP(max_contour, epsilon, True) # find convexity hull and", "img_draw) def segment_arm(frame: np.ndarray, abs_depth_dev: int = 14) -> np.ndarray:", "fingers num_fingers += 1 # draw point as green cv2.circle(img_draw,", "all defects in the hull of a segmented arm region.", "in defect[:3]] # draw the hull cv2.line(img_draw, tuple(start), tuple(end), (0,", "an RGB color image to be annotated :returns: (num_fingers, img_draw)", "arm region segment = segment_arm(img_gray) # find the hull of", "return max_contour, defects def detect_num_fingers(contour: np.ndarray, defects: np.ndarray, img_draw: np.ndarray,", "far) < deg2rad(thresh_deg): # increment number of fingers num_fingers +=", "convexity defects :param img_draw: an RGB color image to be", "finds all defects in the hull of a segmented arm", "to be annotated :returns: (num_fingers, img_draw) the estimated number of", "coordinates start, end, far = [contour[i][0] for i in defect[:3]]", "red img_draw = cv2.cvtColor(segment, cv2.COLOR_GRAY2RGB) (num_fingers, img_draw) = detect_num_fingers(contour, defects,", "contours :param defects: a list of convexity defects :param img_draw:", "image :returns: binary image (mask) of segmented arm region, where", ":param contours: a list of contours :param defects: a list", "np.ndarray) -> Tuple[np.ndarray, np.ndarray]: \"\"\"Find hull defects This method finds", "and convexity defects. It will annotate an RGB color image", "as red cv2.circle(img_draw, tuple(far), 5, (0, 0, 255), -1) #", "255, cv2.THRESH_BINARY) return flooded def find_hull_defects(segment: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:", "if angle is below a threshold, defect point belongs to", "the center of the image. 
:param frame: single-channel depth image", "point coordinates start, end, far = [contour[i][0] for i in", "def deg2rad(angle_deg): \"\"\"Convert degrees to radians This method converts an", "1 num_fingers = 1 # Defects are of shape (num_defects,1,4)", "defects in the hull of a segmented arm region. :param", "max_contour, defects def detect_num_fingers(contour: np.ndarray, defects: np.ndarray, img_draw: np.ndarray, thresh_deg:", "vectors This method returns the angle (in radians) between two", "contour in the image and all corresponding defects \"\"\" contours,", "of fingers, # start counting at 1 num_fingers = 1", "vectors using the cross-product method, which is more accurate for", "method accepts a single-channel depth image of an arm and", "(width // 2, height // 2), 255, flags=4 | (255", "a segmented arm region. :param segment: a binary image (mask)", "counting at 1 num_fingers = 1 # Defects are of", "Defects are of shape (num_defects,1,4) for defect in defects[:, 0,", "of shape (num_points,1,2) - 2 for point coordinates start, end,", "two # extended fingers if angle_rad(start - far, end -", "(mask) of segmented arm region, where arm=255, else=0 \"\"\" height,", "fingers extended if defects is None: return [0, img_draw] #", "as np import cv2 from typing import Tuple __author__ =", "21/2-1 center = frame[height // 2 - center_half:height // 2", "The estimated number of extended fingers and an annotated RGB", "an array of four integers. 
# First three indexes of", "morphological kernel = np.ones((3, 3), np.uint8) frame = cv2.morphologyEx(frame, cv2.MORPH_CLOSE,", "= cv2.cvtColor(segment, cv2.COLOR_GRAY2RGB) (num_fingers, img_draw) = detect_num_fingers(contour, defects, img_draw) return", "__license__ = \"GNU GPL 3.0 or later\" def recognize(img_gray): \"\"\"Recognizes", "image to be annotated :returns: (num_fingers, img_draw) the estimated number", "# First three indexes of start, end and the furthest", "estimates the number of extended fingers based on a single-channel", "- 2 for point coordinates start, end, far = [contour[i][0]", "num_fingers += 1 # draw point as green cv2.circle(img_draw, tuple(far),", "= frame.shape # find center (21x21 pixel) region of imageheight", "single-channel depth image showing a hand and arm region. :param", "later\" def recognize(img_gray): \"\"\"Recognizes hand gesture in a single-channel depth", "# half-width of 21 is 21/2-1 center = frame[height //", "= 14) -> np.ndarray: \"\"\"Segments arm region This method accepts", "center_half:height // 2 + center_half, width // 2 - center_half:width", "# find largest area contour max_contour = max(contours, key=cv2.contourArea) epsilon", "in radians between two vectors This method returns the angle", "128 mask = np.zeros((height + 2, width + 2), np.uint8)", ":param frame: single-channel depth image :returns: binary image (mask) of", ":returns: (num_fingers, img_draw) The estimated number of extended fingers and", "point belongs to two # extended fingers if angle_rad(start -", "= segment_arm(img_gray) # find the hull of the segmented area,", "hand gesture in a single-channel depth image This method estimates", "contours: a list of contours :param defects: a list of", "an algorithm for hand gesture recognition\"\"\" import numpy as np", "arm and hand region and extracts the segmented arm region.", "small_kernel:height // 2 + small_kernel, width // 2 - small_kernel:width", "that belong to fingers green, others red img_draw = 
cv2.cvtColor(segment,", "find center (21x21 pixel) region of imageheight frame center_half =", "arm=255, else=0 :returns: (max_contour, defects) the largest contour in the", "// 2 - center_half:height // 2 + center_half, width //", "+ center_half, width // 2 - center_half:width // 2 +", "[contour[i][0] for i in defect[:3]] # draw the hull cv2.line(img_draw,", "determines the number of extended fingers based on a contour", "angle (in radians) between two array-like vectors using the cross-product", "else=0 \"\"\" height, width = frame.shape # find center (21x21", "cv2.line(img_draw, tuple(start), tuple(end), (0, 255, 0), 2) # if angle", "between two array-like vectors using the cross-product method, which is", "of start, end and the furthest # points respectively #", "convexity defects (one on each # side), so if there", "contour and convexity defects. It will annotate an RGB color", "// 2), 255, flags=4 | (255 << 8)) ret, flooded", "in the center of the image. :param frame: single-channel depth", "the hull of a segmented arm region. :param segment: a", "extracts the segmented arm region. It is assumed that the", "(num_points,1,2) - 2 for point coordinates start, end, far =", "arm region with all relevant defect points and the hull.", "hand gesture recognition\"\"\" import numpy as np import cv2 from", "relevant defect points and the hull. :param contours: a list", "no additional defect points, there are no # fingers extended", "= \"GNU GPL 3.0 or later\" def recognize(img_gray): \"\"\"Recognizes hand", "of four integers. # First three indexes of start, end", "segmented arm region with all relevant defect points and the", "\"\"\" # segment arm region segment = segment_arm(img_gray) # find", "red cv2.circle(img_draw, tuple(far), 5, (0, 0, 255), -1) # make", "return np.arctan2(np.linalg.norm(np.cross(v1, v2)), np.dot(v1, v2)) def deg2rad(angle_deg): \"\"\"Convert degrees to", "fingers based on a contour and convexity defects. 
It will", "image \"\"\" # segment arm region segment = segment_arm(img_gray) #", "a list of contours :param defects: a list of convexity", "v2)), np.dot(v1, v2)) def deg2rad(angle_deg): \"\"\"Convert degrees to radians This", "+= 1 # draw point as green cv2.circle(img_draw, tuple(far), 5,", "small_kernel, width // 2 - small_kernel:width // 2 + small_kernel]", "on a contour and convexity defects. It will annotate an", "return [0, img_draw] # we assume the wrist will generate", "2), 255, flags=4 | (255 << 8)) ret, flooded =", "= cv2.morphologyEx(frame, cv2.MORPH_CLOSE, kernel) # connected component small_kernel = 3", "color image of the segmented arm region with all relevant", "RGB color image to be annotated :returns: (num_fingers, img_draw) the", "each # side), so if there are no additional defect", "= detect_num_fingers(contour, defects, img_draw) return (num_fingers, img_draw) def segment_arm(frame: np.ndarray,", "depth value of center region med_val = np.median(center) # try", "= 128 mask = np.zeros((height + 2, width + 2),", "region segment = segment_arm(img_gray) # find the hull of the", "of center region med_val = np.median(center) # try this instead:", "14) -> np.ndarray: \"\"\"Segments arm region This method accepts a", "Tuple[np.ndarray, np.ndarray]: \"\"\"Find hull defects This method finds all defects", "-1) # make sure we cap the number of fingers", "this instead: frame = np.where(abs(frame - med_val) <= abs_depth_dev, 128,", "median depth value of center region med_val = np.median(center) #", "cv2.MORPH_CLOSE, kernel) # connected component small_kernel = 3 frame[height //", "integers. 
# First three indexes of start, end and the", "accepts a single-channel depth image of an arm and hand", "import Tuple __author__ = \"<NAME>\" __license__ = \"GNU GPL 3.0", "list of contours :param defects: a list of convexity defects", "# connected component small_kernel = 3 frame[height // 2 -", "number of extended fingers based on a single-channel depth image", "len(defects) <= 2: return [0, img_draw] # if there is", "get the number of fingers, # start counting at 1", "the segmented arm region. It is assumed that the hand", "is placed in the center of the image. :param frame:", "hull and defects hull = cv2.convexHull(max_contour, returnPoints=False) defects = cv2.convexityDefects(max_contour,", "we cap the number of fingers return min(5, num_fingers), img_draw", "This method returns the angle (in radians) between two array-like", "typing import Tuple __author__ = \"<NAME>\" __license__ = \"GNU GPL", "med_val = np.median(center) # try this instead: frame = np.where(abs(frame", "img_gray: single-channel depth image :returns: (num_fingers, img_draw) The estimated number", "= cv2.approxPolyDP(max_contour, epsilon, True) # find convexity hull and defects", "segmented arm region, where arm=255, else=0 \"\"\" height, width =", "8)) ret, flooded = cv2.threshold(flood, 129, 255, cv2.THRESH_BINARY) return flooded", "find_hull_defects(segment: np.ndarray) -> Tuple[np.ndarray, np.ndarray]: \"\"\"Find hull defects This method", "of segmented arm region, where arm=255, else=0 \"\"\" height, width", "cv2.circle(img_draw, tuple(far), 5, (0, 255, 0), -1) else: # draw", "recognition\"\"\" import numpy as np import cv2 from typing import", "of 21 is 21/2-1 center = frame[height // 2 -", "defects[:, 0, :]: # Each defect is an array of", "defect[:3]] # draw the hull cv2.line(img_draw, tuple(start), tuple(end), (0, 255,", "else: # draw point as red cv2.circle(img_draw, tuple(far), 5, (0,", "of extended fingers and an annotated RGB image \"\"\" #", "hull of the segmented area, and 
based on that find", "find_hull_defects(segment) # detect the number of fingers depending on the", "128, 0).astype(np.uint8) # morphological kernel = np.ones((3, 3), np.uint8) frame", "the hull. :param contours: a list of contours :param defects:", "e[0,2*np.pi) into degrees e[0,360) \"\"\" return angle_deg / 180.0 *", "color image to be annotated :returns: (num_fingers, img_draw) the estimated", "number of fingers depending on the contours and convexity #", "np.ones((3, 3), np.uint8) frame = cv2.morphologyEx(frame, cv2.MORPH_CLOSE, kernel) # connected", "small_kernel:width // 2 + small_kernel] = 128 mask = np.zeros((height", "extended fingers This method determines the number of extended fingers", "method estimates the number of extended fingers based on a", "# detect the number of fingers depending on the contours", "129, 255, cv2.THRESH_BINARY) return flooded def find_hull_defects(segment: np.ndarray) -> Tuple[np.ndarray,", "None: return [0, img_draw] # we assume the wrist will", "an arm and hand region and extracts the segmented arm", "are no # fingers extended if len(defects) <= 2: return", "contours, hierarchy = cv2.findContours(segment, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) # find largest area", "will find a # defect point between two fingers so", "frame.shape # find center (21x21 pixel) region of imageheight frame", "= cv2.threshold(flood, 129, 255, cv2.THRESH_BINARY) return flooded def find_hull_defects(segment: np.ndarray)", "point as green cv2.circle(img_draw, tuple(far), 5, (0, 255, 0), -1)", "and all corresponding defects \"\"\" contours, hierarchy = cv2.findContours(segment, cv2.RETR_TREE,", "= frame.copy() cv2.floodFill(flood, mask, (width // 2, height // 2),", "= np.median(center) # try this instead: frame = np.where(abs(frame -", "\"\"\"Detects the number of extended fingers This method determines the", "three indexes of start, end and the furthest # points", "This method determines the number of extended fingers based on", "in a single-channel 
depth image This method estimates the number", "shape (num_points,1,2) - 2 for point coordinates start, end, far", "are no convexity defects, possibly no hull found or no", "radians) between two array-like vectors using the cross-product method, which", "\"\"\"A module containing an algorithm for hand gesture recognition\"\"\" import", "fingers green, others red img_draw = cv2.cvtColor(segment, cv2.COLOR_GRAY2RGB) (num_fingers, img_draw)", "far, end - far) < deg2rad(thresh_deg): # increment number of", "found or no # fingers extended if defects is None:", "a # defect point between two fingers so to get", "GPL 3.0 or later\" def recognize(img_gray): \"\"\"Recognizes hand gesture in", "a hand and arm region. :param img_gray: single-channel depth image", "the hand is placed in the center of the image.", "single-channel depth image This method estimates the number of extended", "# -*- coding: utf-8 -*- \"\"\"A module containing an algorithm", "kernel = np.ones((3, 3), np.uint8) frame = cv2.morphologyEx(frame, cv2.MORPH_CLOSE, kernel)", "np.uint8) frame = cv2.morphologyEx(frame, cv2.MORPH_CLOSE, kernel) # connected component small_kernel", "region. 
:param img_gray: single-channel depth image :returns: (num_fingers, img_draw) The", "def recognize(img_gray): \"\"\"Recognizes hand gesture in a single-channel depth image", "be annotated :returns: (num_fingers, img_draw) the estimated number of extended", "make sure we cap the number of fingers return min(5,", "image (mask) of segmented arm region, where arm=255, else=0 \"\"\"", "depth image of an arm and hand region and extracts", "<< 8)) ret, flooded = cv2.threshold(flood, 129, 255, cv2.THRESH_BINARY) return", "start counting at 1 num_fingers = 1 # Defects are", "epsilon = 0.01 * cv2.arcLength(max_contour, True) max_contour = cv2.approxPolyDP(max_contour, epsilon,", "+ 2), np.uint8) flood = frame.copy() cv2.floodFill(flood, mask, (width //", "= \"<NAME>\" __license__ = \"GNU GPL 3.0 or later\" def", "to fingers green, others red img_draw = cv2.cvtColor(segment, cv2.COLOR_GRAY2RGB) (num_fingers,", "for hand gesture recognition\"\"\" import numpy as np import cv2", "or later\" def recognize(img_gray): \"\"\"Recognizes hand gesture in a single-channel", "5, (0, 255, 0), -1) else: # draw point as", "= cv2.convexityDefects(max_contour, hull) return max_contour, defects def detect_num_fingers(contour: np.ndarray, defects:", "if defects is None: return [0, img_draw] # we assume", "(contour, defects) = find_hull_defects(segment) # detect the number of fingers", "arm region, where arm=255, else=0 \"\"\" height, width = frame.shape", "angle in radians e[0,2*np.pi) into degrees e[0,360) \"\"\" return angle_deg", "of the segmented area, and based on that find the", ":param img_gray: single-channel depth image :returns: (num_fingers, img_draw) The estimated", "(in radians) between two array-like vectors using the cross-product method,", "fingers extended if len(defects) <= 2: return [0, img_draw] #", "img_draw def angle_rad(v1, v2): \"\"\"Angle in radians between two vectors", "extended fingers based on a contour and convexity defects. 
It", "of contours :param defects: a list of convexity defects :param", "0, 255), -1) # make sure we cap the number", "img_draw = cv2.cvtColor(segment, cv2.COLOR_GRAY2RGB) (num_fingers, img_draw) = detect_num_fingers(contour, defects, img_draw)", "img_draw) = detect_num_fingers(contour, defects, img_draw) return (num_fingers, img_draw) def segment_arm(frame:", "is None: return [0, img_draw] # we assume the wrist", "arm region. :param img_gray: single-channel depth image :returns: (num_fingers, img_draw)", "float = 80.0) -> Tuple[int, np.ndarray]: \"\"\"Detects the number of", "image of the segmented arm region with all relevant defect", "based on that find the # convexity defects (contour, defects)", "color image \"\"\" # if there are no convexity defects,", "angle is below a threshold, defect point belongs to two", "RGB color image \"\"\" # if there are no convexity", "is assumed that the hand is placed in the center", "the furthest # points respectively # contour is of shape", "# find convexity hull and defects hull = cv2.convexHull(max_contour, returnPoints=False)", "+ small_kernel, width // 2 - small_kernel:width // 2 +", "defects) = find_hull_defects(segment) # detect the number of fingers depending", "will annotate an RGB color image of the segmented arm", "\"\"\" height, width = frame.shape # find center (21x21 pixel)", "a list of convexity defects :param img_draw: an RGB color", "of convexity defects :param img_draw: an RGB color image to", "and hand region and extracts the segmented arm region. It", "between two vectors This method returns the angle (in radians)", "np import cv2 from typing import Tuple __author__ = \"<NAME>\"", "defects \"\"\" contours, hierarchy = cv2.findContours(segment, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) # find", "convexity defects. 
It will annotate an RGB color image of", "# Defects are of shape (num_defects,1,4) for defect in defects[:,", "thresh_deg: float = 80.0) -> Tuple[int, np.ndarray]: \"\"\"Detects the number", "try this instead: frame = np.where(abs(frame - med_val) <= abs_depth_dev,", "if angle_rad(start - far, end - far) < deg2rad(thresh_deg): #", "defects def detect_num_fingers(contour: np.ndarray, defects: np.ndarray, img_draw: np.ndarray, thresh_deg: float", "2 - center_half:height // 2 + center_half, width // 2", "0).astype(np.uint8) # morphological kernel = np.ones((3, 3), np.uint8) frame =", "points, there are no # fingers extended if len(defects) <=", "defects: a list of convexity defects :param img_draw: an RGB", "- small_kernel:width // 2 + small_kernel] = 128 mask =", "img_draw: np.ndarray, thresh_deg: float = 80.0) -> Tuple[int, np.ndarray]: \"\"\"Detects", "This method finds all defects in the hull of a", "find convexity hull and defects hull = cv2.convexHull(max_contour, returnPoints=False) defects", "the hull of the segmented area, and based on that", "single-channel depth image of an arm and hand region and", "fingers, # start counting at 1 num_fingers = 1 #", "depth image This method estimates the number of extended fingers", "and an annotated RGB color image \"\"\" # if there", "there is a sufficient amount of convexity defects, we will", "np.dot(v1, v2)) def deg2rad(angle_deg): \"\"\"Convert degrees to radians This method", "flags=4 | (255 << 8)) ret, flooded = cv2.threshold(flood, 129,", "region This method accepts a single-channel depth image of an", "2, height // 2), 255, flags=4 | (255 << 8))", "and convexity # defects, then draw defects that belong to", "width + 2), np.uint8) flood = frame.copy() cv2.floodFill(flood, mask, (width", "degrees to radians This method converts an angle in radians", "defects) the largest contour in the image and all corresponding", "depth image :returns: binary image (mask) of segmented arm region,", "- center_half:height // 2 + 
center_half, width // 2 -", "region, where arm=255, else=0 :returns: (max_contour, defects) the largest contour", "number of extended fingers based on a contour and convexity", "of extended fingers based on a contour and convexity defects.", "1 # Defects are of shape (num_defects,1,4) for defect in", "arm=255, else=0 \"\"\" height, width = frame.shape # find center", "sufficient amount of convexity defects, we will find a #", "draw point as red cv2.circle(img_draw, tuple(far), 5, (0, 0, 255),", "two convexity defects (one on each # side), so if", "the number of extended fingers This method determines the number", "- small_kernel:height // 2 + small_kernel, width // 2 -", "# defects, then draw defects that belong to fingers green,", "2 + small_kernel] = 128 mask = np.zeros((height + 2,", "or no # fingers extended if defects is None: return", "cv2.cvtColor(segment, cv2.COLOR_GRAY2RGB) (num_fingers, img_draw) = detect_num_fingers(contour, defects, img_draw) return (num_fingers,", "hand and arm region. :param img_gray: single-channel depth image :returns:", "image \"\"\" # if there are no convexity defects, possibly", "# try this instead: frame = np.where(abs(frame - med_val) <=", "extended fingers and an annotated RGB color image \"\"\" #", "of convexity defects, we will find a # defect point", "number of extended fingers This method determines the number of", "gesture in a single-channel depth image This method estimates the", "depth image showing a hand and arm region. 
:param img_gray:", "Tuple[int, np.ndarray]: \"\"\"Detects the number of extended fingers This method", "# defect point between two fingers so to get the", "if len(defects) <= 2: return [0, img_draw] # if there", "First three indexes of start, end and the furthest #", "-> Tuple[int, np.ndarray]: \"\"\"Detects the number of extended fingers This", "points respectively # contour is of shape (num_points,1,2) - 2", "fingers based on a single-channel depth image showing a hand", "of an arm and hand region and extracts the segmented", "number of fingers num_fingers += 1 # draw point as", "region of imageheight frame center_half = 10 # half-width of", "It will annotate an RGB color image of the segmented", "in defects[:, 0, :]: # Each defect is an array", "= np.ones((3, 3), np.uint8) frame = cv2.morphologyEx(frame, cv2.MORPH_CLOSE, kernel) #", "def find_hull_defects(segment: np.ndarray) -> Tuple[np.ndarray, np.ndarray]: \"\"\"Find hull defects This", "two vectors This method returns the angle (in radians) between", "the image. :param frame: single-channel depth image :returns: binary image", ":param segment: a binary image (mask) of a segmented arm", "in the hull of a segmented arm region. 
:param segment:", "corresponding defects \"\"\" contours, hierarchy = cv2.findContours(segment, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) #", "the image and all corresponding defects \"\"\" contours, hierarchy =", "(0, 255, 0), 2) # if angle is below a", "# start counting at 1 num_fingers = 1 # Defects", "fingers and an annotated RGB color image \"\"\" # if", "array-like vectors using the cross-product method, which is more accurate", "v2): \"\"\"Angle in radians between two vectors This method returns", "of fingers depending on the contours and convexity # defects,", "defect points, there are no # fingers extended if len(defects)", "abs_depth_dev: int = 14) -> np.ndarray: \"\"\"Segments arm region This", "draw point as green cv2.circle(img_draw, tuple(far), 5, (0, 255, 0),", "green cv2.circle(img_draw, tuple(far), 5, (0, 255, 0), -1) else: #", "np.ndarray]: \"\"\"Detects the number of extended fingers This method determines", "3 frame[height // 2 - small_kernel:height // 2 + small_kernel,", "+ center_half] # find median depth value of center region", "< deg2rad(thresh_deg): # increment number of fingers num_fingers += 1", "RGB image \"\"\" # segment arm region segment = segment_arm(img_gray)", "(num_fingers, img_draw) the estimated number of extended fingers and an", "# segment arm region segment = segment_arm(img_gray) # find the", "a threshold, defect point belongs to two # extended fingers", "= np.where(abs(frame - med_val) <= abs_depth_dev, 128, 0).astype(np.uint8) # morphological", "small_kernel = 3 frame[height // 2 - small_kernel:height // 2", "= find_hull_defects(segment) # detect the number of fingers depending on", "\"\"\"Find hull defects This method finds all defects in the", "connected component small_kernel = 3 frame[height // 2 - small_kernel:height", "in the image and all corresponding defects \"\"\" contours, hierarchy", "the dot-product-acos method. 
\"\"\" return np.arctan2(np.linalg.norm(np.cross(v1, v2)), np.dot(v1, v2)) def", "# find center (21x21 pixel) region of imageheight frame center_half", "[0, img_draw] # if there is a sufficient amount of", "utf-8 -*- \"\"\"A module containing an algorithm for hand gesture", "contours and convexity # defects, then draw defects that belong", "the hull cv2.line(img_draw, tuple(start), tuple(end), (0, 255, 0), 2) #", "np.where(abs(frame - med_val) <= abs_depth_dev, 128, 0).astype(np.uint8) # morphological kernel", "defects. It will annotate an RGB color image of the", "# if there are no convexity defects, possibly no hull", "deg2rad(thresh_deg): # increment number of fingers num_fingers += 1 #", "(21x21 pixel) region of imageheight frame center_half = 10 #", "using the cross-product method, which is more accurate for small", "2) # if angle is below a threshold, defect point", "center of the image. :param frame: single-channel depth image :returns:", "where arm=255, else=0 \"\"\" height, width = frame.shape # find", "-*- \"\"\"A module containing an algorithm for hand gesture recognition\"\"\"", "key=cv2.contourArea) epsilon = 0.01 * cv2.arcLength(max_contour, True) max_contour = cv2.approxPolyDP(max_contour,", "in radians e[0,2*np.pi) into degrees e[0,360) \"\"\" return angle_deg /", "segment = segment_arm(img_gray) # find the hull of the segmented", "fingers and an annotated RGB image \"\"\" # segment arm", "img_draw) return (num_fingers, img_draw) def segment_arm(frame: np.ndarray, abs_depth_dev: int =", "no # fingers extended if len(defects) <= 2: return [0,", "\"\"\" contours, hierarchy = cv2.findContours(segment, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) # find largest", "hull found or no # fingers extended if defects is", "then draw defects that belong to fingers green, others red", "a binary image (mask) of a segmented arm region, where", "into degrees e[0,360) \"\"\" return angle_deg / 180.0 * np.pi", "convexity hull and defects hull = cv2.convexHull(max_contour, 
returnPoints=False) defects =", "we will find a # defect point between two fingers", "= 0.01 * cv2.arcLength(max_contour, True) max_contour = cv2.approxPolyDP(max_contour, epsilon, True)", "and the furthest # points respectively # contour is of", "of a segmented arm region, where arm=255, else=0 :returns: (max_contour,", "a single-channel depth image of an arm and hand region", "half-width of 21 is 21/2-1 center = frame[height // 2", "the largest contour in the image and all corresponding defects", "fingers if angle_rad(start - far, end - far) < deg2rad(thresh_deg):", "extended fingers based on a single-channel depth image showing a", "detect_num_fingers(contour, defects, img_draw) return (num_fingers, img_draw) def segment_arm(frame: np.ndarray, abs_depth_dev:", "(mask) of a segmented arm region, where arm=255, else=0 :returns:", "2 - center_half:width // 2 + center_half] # find median", "width // 2 - center_half:width // 2 + center_half] #", "# convexity defects (contour, defects) = find_hull_defects(segment) # detect the", "(num_fingers, img_draw) The estimated number of extended fingers and an", "image of an arm and hand region and extracts the", "defect points and the hull. :param contours: a list of", "angle_rad(v1, v2): \"\"\"Angle in radians between two vectors This method", "we assume the wrist will generate two convexity defects (one", "255, 0), 2) # if angle is below a threshold,", "and arm region. 
:param img_gray: single-channel depth image :returns: (num_fingers,", "max_contour = max(contours, key=cv2.contourArea) epsilon = 0.01 * cv2.arcLength(max_contour, True)", "center = frame[height // 2 - center_half:height // 2 +", "hull = cv2.convexHull(max_contour, returnPoints=False) defects = cv2.convexityDefects(max_contour, hull) return max_contour,", "img_draw) the estimated number of extended fingers and an annotated", "- center_half:width // 2 + center_half] # find median depth", "def detect_num_fingers(contour: np.ndarray, defects: np.ndarray, img_draw: np.ndarray, thresh_deg: float =", "angles than the dot-product-acos method. \"\"\" return np.arctan2(np.linalg.norm(np.cross(v1, v2)), np.dot(v1,", "if there are no additional defect points, there are no", "cross-product method, which is more accurate for small angles than", "generate two convexity defects (one on each # side), so", "method converts an angle in radians e[0,2*np.pi) into degrees e[0,360)", "defects (contour, defects) = find_hull_defects(segment) # detect the number of", "to two # extended fingers if angle_rad(start - far, end", "so if there are no additional defect points, there are", "the angle (in radians) between two array-like vectors using the", "num_fingers), img_draw def angle_rad(v1, v2): \"\"\"Angle in radians between two", "defects that belong to fingers green, others red img_draw =", "number of extended fingers and an annotated RGB image \"\"\"", "at 1 num_fingers = 1 # Defects are of shape", "number of fingers return min(5, num_fingers), img_draw def angle_rad(v1, v2):", ":returns: (max_contour, defects) the largest contour in the image and", "abs_depth_dev, 128, 0).astype(np.uint8) # morphological kernel = np.ones((3, 3), np.uint8)", "and defects hull = cv2.convexHull(max_contour, returnPoints=False) defects = cv2.convexityDefects(max_contour, hull)", "point as red cv2.circle(img_draw, tuple(far), 5, (0, 0, 255), -1)", "segmented area, and based on that find the # convexity", 
"(num_fingers, img_draw) def segment_arm(frame: np.ndarray, abs_depth_dev: int = 14) ->", "amount of convexity defects, we will find a # defect", "convexity # defects, then draw defects that belong to fingers", "21 is 21/2-1 center = frame[height // 2 - center_half:height", "shape (num_defects,1,4) for defect in defects[:, 0, :]: # Each", "dot-product-acos method. \"\"\" return np.arctan2(np.linalg.norm(np.cross(v1, v2)), np.dot(v1, v2)) def deg2rad(angle_deg):", "center_half] # find median depth value of center region med_val", "region with all relevant defect points and the hull. :param", "deg2rad(angle_deg): \"\"\"Convert degrees to radians This method converts an angle", "method, which is more accurate for small angles than the", "min(5, num_fingers), img_draw def angle_rad(v1, v2): \"\"\"Angle in radians between", "the number of fingers depending on the contours and convexity", "np.ndarray, abs_depth_dev: int = 14) -> np.ndarray: \"\"\"Segments arm region", "region. It is assumed that the hand is placed in", "(255 << 8)) ret, flooded = cv2.threshold(flood, 129, 255, cv2.THRESH_BINARY)", "cv2.findContours(segment, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) # find largest area contour max_contour =", "the contours and convexity # defects, then draw defects that", "frame[height // 2 - small_kernel:height // 2 + small_kernel, width", "of shape (num_defects,1,4) for defect in defects[:, 0, :]: #", "def segment_arm(frame: np.ndarray, abs_depth_dev: int = 14) -> np.ndarray: \"\"\"Segments", "= 10 # half-width of 21 is 21/2-1 center =", "segment: a binary image (mask) of a segmented arm region,", "indexes of start, end and the furthest # points respectively", "image showing a hand and arm region. :param img_gray: single-channel", "all relevant defect points and the hull. :param contours: a", "assumed that the hand is placed in the center of", "for small angles than the dot-product-acos method. 
\"\"\" return np.arctan2(np.linalg.norm(np.cross(v1,", "and an annotated RGB image \"\"\" # segment arm region", "(0, 255, 0), -1) else: # draw point as red", "center_half:width // 2 + center_half] # find median depth value", "img_draw) The estimated number of extended fingers and an annotated", "start, end, far = [contour[i][0] for i in defect[:3]] #", "showing a hand and arm region. :param img_gray: single-channel depth", "start, end and the furthest # points respectively # contour", "\"\"\"Recognizes hand gesture in a single-channel depth image This method", "- med_val) <= abs_depth_dev, 128, 0).astype(np.uint8) # morphological kernel =", "list of convexity defects :param img_draw: an RGB color image", "respectively # contour is of shape (num_points,1,2) - 2 for", "number of extended fingers and an annotated RGB color image", "cv2.threshold(flood, 129, 255, cv2.THRESH_BINARY) return flooded def find_hull_defects(segment: np.ndarray) ->", "tuple(far), 5, (0, 0, 255), -1) # make sure we", "from typing import Tuple __author__ = \"<NAME>\" __license__ = \"GNU", "as green cv2.circle(img_draw, tuple(far), 5, (0, 255, 0), -1) else:", "two array-like vectors using the cross-product method, which is more", "no convexity defects, possibly no hull found or no #", "is 21/2-1 center = frame[height // 2 - center_half:height //", "contour max_contour = max(contours, key=cv2.contourArea) epsilon = 0.01 * cv2.arcLength(max_contour,", "for defect in defects[:, 0, :]: # Each defect is", "an annotated RGB color image \"\"\" # if there are", "0), -1) else: # draw point as red cv2.circle(img_draw, tuple(far),", "= 80.0) -> Tuple[int, np.ndarray]: \"\"\"Detects the number of extended", "and based on that find the # convexity defects (contour,", "single-channel depth image :returns: (num_fingers, img_draw) The estimated number of", "and the hull. 
:param contours: a list of contours :param", "\"\"\"Angle in radians between two vectors This method returns the", "= cv2.convexHull(max_contour, returnPoints=False) defects = cv2.convexityDefects(max_contour, hull) return max_contour, defects", "mask = np.zeros((height + 2, width + 2), np.uint8) flood", "side), so if there are no additional defect points, there", "2, width + 2), np.uint8) flood = frame.copy() cv2.floodFill(flood, mask,", "hull. :param contours: a list of contours :param defects: a", "\"GNU GPL 3.0 or later\" def recognize(img_gray): \"\"\"Recognizes hand gesture", "region med_val = np.median(center) # try this instead: frame =", "// 2 + small_kernel] = 128 mask = np.zeros((height +", "\"\"\"Segments arm region This method accepts a single-channel depth image", "region. :param segment: a binary image (mask) of a segmented", "* cv2.arcLength(max_contour, True) max_contour = cv2.approxPolyDP(max_contour, epsilon, True) # find", "the # convexity defects (contour, defects) = find_hull_defects(segment) # detect", "detect the number of fingers depending on the contours and", "-> Tuple[np.ndarray, np.ndarray]: \"\"\"Find hull defects This method finds all", "of extended fingers and an annotated RGB color image \"\"\"", "# make sure we cap the number of fingers return", "the estimated number of extended fingers and an annotated RGB", "green, others red img_draw = cv2.cvtColor(segment, cv2.COLOR_GRAY2RGB) (num_fingers, img_draw) =", "cv2 from typing import Tuple __author__ = \"<NAME>\" __license__ =", "frame[height // 2 - center_half:height // 2 + center_half, width", "255), -1) # make sure we cap the number of", "tuple(end), (0, 255, 0), 2) # if angle is below", "of the image. 
:param frame: single-channel depth image :returns: binary", "2 - small_kernel:height // 2 + small_kernel, width // 2", "v2)) def deg2rad(angle_deg): \"\"\"Convert degrees to radians This method converts", "fingers so to get the number of fingers, # start", "(num_fingers, img_draw) = detect_num_fingers(contour, defects, img_draw) return (num_fingers, img_draw) def", "on each # side), so if there are no additional", "-1) else: # draw point as red cv2.circle(img_draw, tuple(far), 5,", "np.median(center) # try this instead: frame = np.where(abs(frame - med_val)" ]
[ "print \"If panic happens, wait 10s and reboot device.\" print", "space: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-k', '--kernel', action='store', help='Exclude kernel:", "// You may obtain a copy of the License at", "sys.exit(0) else: print \"Panic Tracer did not get started\" def", "self._kernel_module_parameters += \" exclude_userspace=\" + str(self.args.userspace) self._kernel_module_parameters += \" exclude_kernel=\"", "wait 100ms, Stop tracing, fetch sideband info Logger.start_tracing(self) time.sleep(0.2) Logger.stop_tracing(self)", "limitations under the License. ''' \"\"\" PanicLogger RAM-tracing \"\"\" import", "trace data to gbuffer: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-u', '--userspace',", "earlier to stop execution earlier self.start_tracing() def start_tracing(self): self._debug_print(\"start_tracing\") trace_name,", "\" exclude_kernel=\" + str(self.args.kernel) def initialize(self): self._debug_print(\"PanicLogger::initialize\") # Initialize Logger", "Copyright (c) 2015 Intel Corporation // // Licensed under the", "trace_method=1 sideband_log_method=1\" # Add more option to command line input", "help='Exclude kernel: 0=Off(default), 1=On', required=False, default=0) self._parser.add_argument('-d', '--dump', action='store', help='Dump", "Tracer did not get started\" def stop_tracing(self): return def get_data(self):", "name>> to start panic tracing? 
:\") if trace_name: self.set_trace_path(trace_path, trace_name)", "kernel modules for processing: 0=Off, 1=On(default)', required=False, default=0) self.args =", "panic_tracer=\" + str(self.args.panic) self._kernel_module_parameters += \" panic_sideband=\" + str(self.args.sideband) self._kernel_module_parameters", "+= \" panic_gbuffer=\" + str(self.args.gbuffer) self._kernel_module_parameters += \" exclude_userspace=\" +", "+ str(self.args.kernel) def initialize(self): self._debug_print(\"PanicLogger::initialize\") # Initialize Logger base class", "\"\"\" import sys import time from logger import Logger class", "print \"sat-panic-fetch \" + self.trace_name sys.exit(0) else: print \"Panic Tracer", "\" exclude_userspace=\" + str(self.args.userspace) self._kernel_module_parameters += \" exclude_kernel=\" + str(self.args.kernel)", "print \"Panic Tracer did not get started\" def stop_tracing(self): return", "self._kernel_module_parameters += \" panic_sideband=\" + str(self.args.sideband) self._kernel_module_parameters += \" panic_gbuffer=\"", "specific language governing permissions and // limitations under the License.", "RAM-tracing self._kernel_module_parameters += \" trace_method=1 sideband_log_method=1\" # Add more option", "Licensed under the Apache License, Version 2.0 (the \"License\"); //", "and // limitations under the License. 
''' \"\"\" PanicLogger RAM-tracing", "under the License is distributed on an \"AS IS\" BASIS,", "for the specific language governing permissions and // limitations under", "# -*- coding: utf-8 -*- ''' // Copyright (c) 2015", "Start tracing, wait 100ms, Stop tracing, fetch sideband info Logger.start_tracing(self)", "help='Panic tracing mode: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-g', '--gbuffer', action='store',", "-*- coding: utf-8 -*- ''' // Copyright (c) 2015 Intel", "License for the specific language governing permissions and // limitations", "str(self.args.sideband) self._kernel_module_parameters += \" panic_gbuffer=\" + str(self.args.gbuffer) self._kernel_module_parameters += \"", "// you may not use this file except in compliance", "for processing: 0=Off, 1=On(default)', required=False, default=0) self.args = self._parser.parse_args() self._kernel_module_parameters", "to stop execution earlier self.start_tracing() def start_tracing(self): self._debug_print(\"start_tracing\") trace_name, trace_path", "the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required", "License is distributed on an \"AS IS\" BASIS, // WITHOUT", "2.0 (the \"License\"); // you may not use this file", "trace_name: self.set_trace_path(trace_path, trace_name) self.get_build_info() # TODO Problem, there is no", "tracing, wait 100ms, Stop tracing, fetch sideband info Logger.start_tracing(self) time.sleep(0.2)", "\"AS IS\" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY", "language governing permissions and // limitations under the License. 
'''", "def __init__(self, control): # Base class init call Logger.__init__(self, control)", "utf-8 -*- ''' // Copyright (c) 2015 Intel Corporation //", "tracing mode: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-g', '--gbuffer', action='store', help='Dump", "// // Unless required by applicable law or agreed to", "\"Panic Tracer did not get started\" def stop_tracing(self): return def", "class init call Logger.__init__(self, control) # Add default kernel module", "agreed to in writing, software // distributed under the License", "License. ''' \"\"\" PanicLogger RAM-tracing \"\"\" import sys import time", "start panic tracing? :\") if trace_name: self.set_trace_path(trace_path, trace_name) self.get_build_info() #", "express or implied. // See the License for the specific", "0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-k', '--kernel', action='store', help='Exclude kernel: 0=Off(default),", "http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed", "\" panic_tracer=\" + str(self.args.panic) self._kernel_module_parameters += \" panic_sideband=\" + str(self.args.sideband)", "print \"Panic tracing activated\" print \"If panic happens, wait 10s", "// distributed under the License is distributed on an \"AS", "control) # Add default kernel module parameter for RAM-tracing self._kernel_module_parameters", "writing, software // distributed under the License is distributed on", "# Add more option to command line input self._parser.add_argument('-p', '--panic',", "<filename>satt/trace/logger/panic.py #!/usr/bin/env python # -*- coding: utf-8 -*- ''' //", "ANY KIND, either express or implied. // See the License", "time from logger import Logger class PanicLogger(Logger): \"\"\" Panic logger", "self.get_trace_name(\"Enter <<trace name>> to start panic tracing? 
:\") if trace_name:", "data to gbuffer: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-u', '--userspace', action='store',", "+ self.trace_name sys.exit(0) else: print \"Panic Tracer did not get", "under the Apache License, Version 2.0 (the \"License\"); // you", "str(self.args.userspace) self._kernel_module_parameters += \" exclude_kernel=\" + str(self.args.kernel) def initialize(self): self._debug_print(\"PanicLogger::initialize\")", "Problem, there is no Sideband.bin info yet # Quick Fix", "100ms, Stop tracing, fetch sideband info Logger.start_tracing(self) time.sleep(0.2) Logger.stop_tracing(self) time.sleep(0.2)", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "CONDITIONS OF ANY KIND, either express or implied. // See", "applicable law or agreed to in writing, software // distributed", "See the License for the specific language governing permissions and", "Quick Fix # Start tracing, wait 100ms, Stop tracing, fetch", "required=False, default=1) self._parser.add_argument('-k', '--kernel', action='store', help='Exclude kernel: 0=Off(default), 1=On', required=False,", "trace_name) self.get_build_info() # TODO Problem, there is no Sideband.bin info", "not get started\" def stop_tracing(self): return def get_data(self): return def", "device.\" print \"\" print \"When device boot up run following", "no Sideband.bin info yet # Quick Fix # Start tracing,", "required by applicable law or agreed to in writing, software", "import Logger class PanicLogger(Logger): \"\"\" Panic logger \"\"\" def __init__(self,", "device boot up run following command:\" print \"sat-panic-fetch \" +", "# Add default kernel module parameter for RAM-tracing self._kernel_module_parameters +=", "help='Panic tracing mode: 1=Normal, 2=Hooked(default)', required=False, default=2) self._parser.add_argument('-s', '--sideband', action='store',", "Logger.get_sideband_data(self) self.dump_kernel() self.dump_linux_gate() self.dump_kernel_modules() 
Logger.start_tracing(self) print \"\" print \"Panic tracing", "tracing, fetch sideband info Logger.start_tracing(self) time.sleep(0.2) Logger.stop_tracing(self) time.sleep(0.2) Logger.get_sideband_data(self) self.dump_kernel()", "activated\" print \"If panic happens, wait 10s and reboot device.\"", "implied. // See the License for the specific language governing", "to command line input self._parser.add_argument('-p', '--panic', action='store', help='Panic tracing mode:", "OR CONDITIONS OF ANY KIND, either express or implied. //", "is no Sideband.bin info yet # Quick Fix # Start", "or agreed to in writing, software // distributed under the", "earlier self.start_tracing() def start_tracing(self): self._debug_print(\"start_tracing\") trace_name, trace_path = self.get_trace_name(\"Enter <<trace", "str(self.args.panic) self._kernel_module_parameters += \" panic_sideband=\" + str(self.args.sideband) self._kernel_module_parameters += \"", "option to command line input self._parser.add_argument('-p', '--panic', action='store', help='Panic tracing", "distributed under the License is distributed on an \"AS IS\"", "+= \" trace_method=1 sideband_log_method=1\" # Add more option to command", "base class Logger.initialize(self) # Call start_tracing earlier to stop execution", "of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless", "Logger.__init__(self, control) # Add default kernel module parameter for RAM-tracing", "on an \"AS IS\" BASIS, // WITHOUT WARRANTIES OR CONDITIONS", "# Start tracing, wait 100ms, Stop tracing, fetch sideband info", "initialize(self): self._debug_print(\"PanicLogger::initialize\") # Initialize Logger base class Logger.initialize(self) # Call", "\"License\"); // you may not use this file except in", "print \"\" print \"Panic tracing activated\" print \"If panic happens,", "not use this file except in compliance with the License.", "Sideband.bin info yet # Quick Fix # Start tracing, wait", "you may not use this file except in 
compliance with", "# Initialize Logger base class Logger.initialize(self) # Call start_tracing earlier", "self.trace_name sys.exit(0) else: print \"Panic Tracer did not get started\"", "self._parser.add_argument('-s', '--sideband', action='store', help='Panic tracing mode: 0=Off, 1=On(default)', required=False, default=1)", "default kernel module parameter for RAM-tracing self._kernel_module_parameters += \" trace_method=1", "help='Exclude user space: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-k', '--kernel', action='store',", "if trace_name: self.set_trace_path(trace_path, trace_name) self.get_build_info() # TODO Problem, there is", "to in writing, software // distributed under the License is", "governing permissions and // limitations under the License. ''' \"\"\"", "0=Off, 1=On(default)', required=False, default=0) self.args = self._parser.parse_args() self._kernel_module_parameters += \"", "self.start_tracing() def start_tracing(self): self._debug_print(\"start_tracing\") trace_name, trace_path = self.get_trace_name(\"Enter <<trace name>>", "# Quick Fix # Start tracing, wait 100ms, Stop tracing,", "panic happens, wait 10s and reboot device.\" print \"\" print", "tracing activated\" print \"If panic happens, wait 10s and reboot", "1=On(default)', required=False, default=1) self._parser.add_argument('-k', '--kernel', action='store', help='Exclude kernel: 0=Off(default), 1=On',", "Logger.start_tracing(self) time.sleep(0.2) Logger.stop_tracing(self) time.sleep(0.2) Logger.get_sideband_data(self) self.dump_kernel() self.dump_linux_gate() self.dump_kernel_modules() Logger.start_tracing(self) print", "\"\"\" Panic logger \"\"\" def __init__(self, control): # Base class", "coding: utf-8 -*- ''' // Copyright (c) 2015 Intel Corporation", "happens, wait 10s and reboot device.\" print \"\" print \"When", "''' \"\"\" PanicLogger RAM-tracing \"\"\" import sys import time from", "'--sideband', action='store', help='Panic tracing mode: 0=Off, 
1=On(default)', required=False, default=1) self._parser.add_argument('-g',", "software // distributed under the License is distributed on an", "Add default kernel module parameter for RAM-tracing self._kernel_module_parameters += \"", "// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or", "user space: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-k', '--kernel', action='store', help='Exclude", "default=2) self._parser.add_argument('-s', '--sideband', action='store', help='Panic tracing mode: 0=Off, 1=On(default)', required=False,", "1=On', required=False, default=0) self._parser.add_argument('-d', '--dump', action='store', help='Dump kernel and kernel", "to gbuffer: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-u', '--userspace', action='store', help='Exclude", "0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-u', '--userspace', action='store', help='Exclude user space:", "line input self._parser.add_argument('-p', '--panic', action='store', help='Panic tracing mode: 1=Normal, 2=Hooked(default)',", "action='store', help='Panic tracing mode: 1=Normal, 2=Hooked(default)', required=False, default=2) self._parser.add_argument('-s', '--sideband',", "trace_path = self.get_trace_name(\"Enter <<trace name>> to start panic tracing? :\")", "default=1) self._parser.add_argument('-g', '--gbuffer', action='store', help='Dump trace data to gbuffer: 0=Off,", "execution earlier self.start_tracing() def start_tracing(self): self._debug_print(\"start_tracing\") trace_name, trace_path = self.get_trace_name(\"Enter", "the License. 
''' \"\"\" PanicLogger RAM-tracing \"\"\" import sys import", "for RAM-tracing self._kernel_module_parameters += \" trace_method=1 sideband_log_method=1\" # Add more", "action='store', help='Exclude user space: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-k', '--kernel',", "call Logger.__init__(self, control) # Add default kernel module parameter for", "+ str(self.args.sideband) self._kernel_module_parameters += \" panic_gbuffer=\" + str(self.args.gbuffer) self._kernel_module_parameters +=", "tracing mode: 1=Normal, 2=Hooked(default)', required=False, default=2) self._parser.add_argument('-s', '--sideband', action='store', help='Panic", "\"sat-panic-fetch \" + self.trace_name sys.exit(0) else: print \"Panic Tracer did", "// See the License for the specific language governing permissions", "// Licensed under the Apache License, Version 2.0 (the \"License\");", ":\") if trace_name: self.set_trace_path(trace_path, trace_name) self.get_build_info() # TODO Problem, there", "in compliance with the License. 
// You may obtain a", "\" panic_sideband=\" + str(self.args.sideband) self._kernel_module_parameters += \" panic_gbuffer=\" + str(self.args.gbuffer)", "logger \"\"\" def __init__(self, control): # Base class init call", "// Copyright (c) 2015 Intel Corporation // // Licensed under", "Version 2.0 (the \"License\"); // you may not use this", "input self._parser.add_argument('-p', '--panic', action='store', help='Panic tracing mode: 1=Normal, 2=Hooked(default)', required=False,", "stop execution earlier self.start_tracing() def start_tracing(self): self._debug_print(\"start_tracing\") trace_name, trace_path =", "1=Normal, 2=Hooked(default)', required=False, default=2) self._parser.add_argument('-s', '--sideband', action='store', help='Panic tracing mode:", "0=Off(default), 1=On', required=False, default=0) self._parser.add_argument('-d', '--dump', action='store', help='Dump kernel and", "required=False, default=1) self._parser.add_argument('-g', '--gbuffer', action='store', help='Dump trace data to gbuffer:", "sideband info Logger.start_tracing(self) time.sleep(0.2) Logger.stop_tracing(self) time.sleep(0.2) Logger.get_sideband_data(self) self.dump_kernel() self.dump_linux_gate() self.dump_kernel_modules()", "to start panic tracing? 
:\") if trace_name: self.set_trace_path(trace_path, trace_name) self.get_build_info()", "-*- ''' // Copyright (c) 2015 Intel Corporation // //", "(c) 2015 Intel Corporation // // Licensed under the Apache", "self.args = self._parser.parse_args() self._kernel_module_parameters += \" panic_tracer=\" + str(self.args.panic) self._kernel_module_parameters", "Stop tracing, fetch sideband info Logger.start_tracing(self) time.sleep(0.2) Logger.stop_tracing(self) time.sleep(0.2) Logger.get_sideband_data(self)", "may not use this file except in compliance with the", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "action='store', help='Panic tracing mode: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-g', '--gbuffer',", "KIND, either express or implied. // See the License for", "this file except in compliance with the License. // You", "info yet # Quick Fix # Start tracing, wait 100ms,", "+= \" panic_sideband=\" + str(self.args.sideband) self._kernel_module_parameters += \" panic_gbuffer=\" +", "modules for processing: 0=Off, 1=On(default)', required=False, default=0) self.args = self._parser.parse_args()", "kernel and kernel modules for processing: 0=Off, 1=On(default)', required=False, default=0)", "\"If panic happens, wait 10s and reboot device.\" print \"\"", "in writing, software // distributed under the License is distributed", "'--gbuffer', action='store', help='Dump trace data to gbuffer: 0=Off, 1=On(default)', required=False,", "'--panic', action='store', help='Panic tracing mode: 1=Normal, 2=Hooked(default)', required=False, default=2) self._parser.add_argument('-s',", "Add more option to command line input self._parser.add_argument('-p', '--panic', action='store',", "'--userspace', action='store', help='Exclude user space: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-k',", "+ str(self.args.panic) self._kernel_module_parameters += \" panic_sideband=\" + str(self.args.sideband) 
self._kernel_module_parameters +=", "started\" def stop_tracing(self): return def get_data(self): return def get_trace_data(self): return", "self._parser.parse_args() self._kernel_module_parameters += \" panic_tracer=\" + str(self.args.panic) self._kernel_module_parameters += \"", "2=Hooked(default)', required=False, default=2) self._parser.add_argument('-s', '--sideband', action='store', help='Panic tracing mode: 0=Off,", "action='store', help='Dump kernel and kernel modules for processing: 0=Off, 1=On(default)',", "class PanicLogger(Logger): \"\"\" Panic logger \"\"\" def __init__(self, control): #", "self._parser.add_argument('-u', '--userspace', action='store', help='Exclude user space: 0=Off, 1=On(default)', required=False, default=1)", "Initialize Logger base class Logger.initialize(self) # Call start_tracing earlier to", "panic_sideband=\" + str(self.args.sideband) self._kernel_module_parameters += \" panic_gbuffer=\" + str(self.args.gbuffer) self._kernel_module_parameters", "tracing? 
:\") if trace_name: self.set_trace_path(trace_path, trace_name) self.get_build_info() # TODO Problem,", "info Logger.start_tracing(self) time.sleep(0.2) Logger.stop_tracing(self) time.sleep(0.2) Logger.get_sideband_data(self) self.dump_kernel() self.dump_linux_gate() self.dump_kernel_modules() Logger.start_tracing(self)", "str(self.args.kernel) def initialize(self): self._debug_print(\"PanicLogger::initialize\") # Initialize Logger base class Logger.initialize(self)", "and reboot device.\" print \"\" print \"When device boot up", "# Base class init call Logger.__init__(self, control) # Add default", "time.sleep(0.2) Logger.get_sideband_data(self) self.dump_kernel() self.dump_linux_gate() self.dump_kernel_modules() Logger.start_tracing(self) print \"\" print \"Panic", "self._parser.add_argument('-d', '--dump', action='store', help='Dump kernel and kernel modules for processing:", "parameter for RAM-tracing self._kernel_module_parameters += \" trace_method=1 sideband_log_method=1\" # Add", "'--kernel', action='store', help='Exclude kernel: 0=Off(default), 1=On', required=False, default=0) self._parser.add_argument('-d', '--dump',", "default=1) self._parser.add_argument('-k', '--kernel', action='store', help='Exclude kernel: 0=Off(default), 1=On', required=False, default=0)", "(the \"License\"); // you may not use this file except", "action='store', help='Exclude kernel: 0=Off(default), 1=On', required=False, default=0) self._parser.add_argument('-d', '--dump', action='store',", "an \"AS IS\" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF", "1=On(default)', required=False, default=0) self.args = self._parser.parse_args() self._kernel_module_parameters += \" panic_tracer=\"", "def initialize(self): self._debug_print(\"PanicLogger::initialize\") # Initialize Logger base class Logger.initialize(self) #", "License. 
// You may obtain a copy of the License", "default=0) self.args = self._parser.parse_args() self._kernel_module_parameters += \" panic_tracer=\" + str(self.args.panic)", "1=On(default)', required=False, default=1) self._parser.add_argument('-u', '--userspace', action='store', help='Exclude user space: 0=Off,", "Fix # Start tracing, wait 100ms, Stop tracing, fetch sideband", "License, Version 2.0 (the \"License\"); // you may not use", "obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0", "at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable", "self._parser.add_argument('-g', '--gbuffer', action='store', help='Dump trace data to gbuffer: 0=Off, 1=On(default)',", "self._kernel_module_parameters += \" exclude_kernel=\" + str(self.args.kernel) def initialize(self): self._debug_print(\"PanicLogger::initialize\") #", "may obtain a copy of the License at // //", "OF ANY KIND, either express or implied. // See the", "mode: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-g', '--gbuffer', action='store', help='Dump trace", "<<trace name>> to start panic tracing? 
:\") if trace_name: self.set_trace_path(trace_path,", "panic_gbuffer=\" + str(self.args.gbuffer) self._kernel_module_parameters += \" exclude_userspace=\" + str(self.args.userspace) self._kernel_module_parameters", "default=1) self._parser.add_argument('-u', '--userspace', action='store', help='Exclude user space: 0=Off, 1=On(default)', required=False,", "+ str(self.args.userspace) self._kernel_module_parameters += \" exclude_kernel=\" + str(self.args.kernel) def initialize(self):", "required=False, default=0) self.args = self._parser.parse_args() self._kernel_module_parameters += \" panic_tracer=\" +", "'--dump', action='store', help='Dump kernel and kernel modules for processing: 0=Off,", "Logger.stop_tracing(self) time.sleep(0.2) Logger.get_sideband_data(self) self.dump_kernel() self.dump_linux_gate() self.dump_kernel_modules() Logger.start_tracing(self) print \"\" print", "BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "// limitations under the License. ''' \"\"\" PanicLogger RAM-tracing \"\"\"", "start_tracing(self): self._debug_print(\"start_tracing\") trace_name, trace_path = self.get_trace_name(\"Enter <<trace name>> to start", "trace_name, trace_path = self.get_trace_name(\"Enter <<trace name>> to start panic tracing?", "get started\" def stop_tracing(self): return def get_data(self): return def get_trace_data(self):", "= self.get_trace_name(\"Enter <<trace name>> to start panic tracing? 
:\") if", "following command:\" print \"sat-panic-fetch \" + self.trace_name sys.exit(0) else: print", "RAM-tracing \"\"\" import sys import time from logger import Logger", "self.dump_kernel() self.dump_linux_gate() self.dump_kernel_modules() Logger.start_tracing(self) print \"\" print \"Panic tracing activated\"", "else: print \"Panic Tracer did not get started\" def stop_tracing(self):", "Logger class PanicLogger(Logger): \"\"\" Panic logger \"\"\" def __init__(self, control):", "exclude_userspace=\" + str(self.args.userspace) self._kernel_module_parameters += \" exclude_kernel=\" + str(self.args.kernel) def", "exclude_kernel=\" + str(self.args.kernel) def initialize(self): self._debug_print(\"PanicLogger::initialize\") # Initialize Logger base", "time.sleep(0.2) Logger.stop_tracing(self) time.sleep(0.2) Logger.get_sideband_data(self) self.dump_kernel() self.dump_linux_gate() self.dump_kernel_modules() Logger.start_tracing(self) print \"\"", "command line input self._parser.add_argument('-p', '--panic', action='store', help='Panic tracing mode: 1=Normal,", "self.dump_linux_gate() self.dump_kernel_modules() Logger.start_tracing(self) print \"\" print \"Panic tracing activated\" print", "\"Panic tracing activated\" print \"If panic happens, wait 10s and", "// Unless required by applicable law or agreed to in", "control): # Base class init call Logger.__init__(self, control) # Add", "either express or implied. 
// See the License for the", "and kernel modules for processing: 0=Off, 1=On(default)', required=False, default=0) self.args", "\" panic_gbuffer=\" + str(self.args.gbuffer) self._kernel_module_parameters += \" exclude_userspace=\" + str(self.args.userspace)", "\" trace_method=1 sideband_log_method=1\" # Add more option to command line", "+= \" exclude_userspace=\" + str(self.args.userspace) self._kernel_module_parameters += \" exclude_kernel=\" +", "the License for the specific language governing permissions and //", "self._kernel_module_parameters += \" panic_gbuffer=\" + str(self.args.gbuffer) self._kernel_module_parameters += \" exclude_userspace=\"", "self.set_trace_path(trace_path, trace_name) self.get_build_info() # TODO Problem, there is no Sideband.bin", "import time from logger import Logger class PanicLogger(Logger): \"\"\" Panic", "run following command:\" print \"sat-panic-fetch \" + self.trace_name sys.exit(0) else:", "help='Dump trace data to gbuffer: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-u',", "self._debug_print(\"PanicLogger::initialize\") # Initialize Logger base class Logger.initialize(self) # Call start_tracing", "more option to command line input self._parser.add_argument('-p', '--panic', action='store', help='Panic", "there is no Sideband.bin info yet # Quick Fix #", "permissions and // limitations under the License. ''' \"\"\" PanicLogger", "is distributed on an \"AS IS\" BASIS, // WITHOUT WARRANTIES", "the License is distributed on an \"AS IS\" BASIS, //", "import sys import time from logger import Logger class PanicLogger(Logger):", "panic tracing? :\") if trace_name: self.set_trace_path(trace_path, trace_name) self.get_build_info() # TODO", "10s and reboot device.\" print \"\" print \"When device boot", "Unless required by applicable law or agreed to in writing,", "except in compliance with the License. 
// You may obtain", "by applicable law or agreed to in writing, software //", "sys import time from logger import Logger class PanicLogger(Logger): \"\"\"", "// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "the Apache License, Version 2.0 (the \"License\"); // you may", "under the License. ''' \"\"\" PanicLogger RAM-tracing \"\"\" import sys", "module parameter for RAM-tracing self._kernel_module_parameters += \" trace_method=1 sideband_log_method=1\" #", "Logger base class Logger.initialize(self) # Call start_tracing earlier to stop", "or implied. // See the License for the specific language", "+ str(self.args.gbuffer) self._kernel_module_parameters += \" exclude_userspace=\" + str(self.args.userspace) self._kernel_module_parameters +=", "IS\" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "use this file except in compliance with the License. //", "\"\" print \"When device boot up run following command:\" print", "up run following command:\" print \"sat-panic-fetch \" + self.trace_name sys.exit(0)", "reboot device.\" print \"\" print \"When device boot up run", "python # -*- coding: utf-8 -*- ''' // Copyright (c)", "Corporation // // Licensed under the Apache License, Version 2.0", "PanicLogger(Logger): \"\"\" Panic logger \"\"\" def __init__(self, control): # Base", "\" + self.trace_name sys.exit(0) else: print \"Panic Tracer did not", "# TODO Problem, there is no Sideband.bin info yet #", "// // Licensed under the Apache License, Version 2.0 (the", "kernel module parameter for RAM-tracing self._kernel_module_parameters += \" trace_method=1 sideband_log_method=1\"", "start_tracing earlier to stop execution earlier self.start_tracing() def start_tracing(self): self._debug_print(\"start_tracing\")", "with the License. // You may obtain a copy of", "compliance with the License. 
// You may obtain a copy", "Panic logger \"\"\" def __init__(self, control): # Base class init", "a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 //", "Apache License, Version 2.0 (the \"License\"); // you may not", "required=False, default=2) self._parser.add_argument('-s', '--sideband', action='store', help='Panic tracing mode: 0=Off, 1=On(default)',", "1=On(default)', required=False, default=1) self._parser.add_argument('-g', '--gbuffer', action='store', help='Dump trace data to", "self._debug_print(\"start_tracing\") trace_name, trace_path = self.get_trace_name(\"Enter <<trace name>> to start panic", "the specific language governing permissions and // limitations under the", "command:\" print \"sat-panic-fetch \" + self.trace_name sys.exit(0) else: print \"Panic", "#!/usr/bin/env python # -*- coding: utf-8 -*- ''' // Copyright", "file except in compliance with the License. // You may", "boot up run following command:\" print \"sat-panic-fetch \" + self.trace_name", "PanicLogger RAM-tracing \"\"\" import sys import time from logger import", "print \"When device boot up run following command:\" print \"sat-panic-fetch", "''' // Copyright (c) 2015 Intel Corporation // // Licensed", "self.dump_kernel_modules() Logger.start_tracing(self) print \"\" print \"Panic tracing activated\" print \"If", "+= \" exclude_kernel=\" + str(self.args.kernel) def initialize(self): self._debug_print(\"PanicLogger::initialize\") # Initialize", "sideband_log_method=1\" # Add more option to command line input self._parser.add_argument('-p',", "You may obtain a copy of the License at //", "TODO Problem, there is no Sideband.bin info yet # Quick", "# Call start_tracing earlier to stop execution earlier self.start_tracing() def", "= self._parser.parse_args() self._kernel_module_parameters += \" panic_tracer=\" + str(self.args.panic) self._kernel_module_parameters +=", "Logger.initialize(self) # Call start_tracing earlier to stop execution earlier 
self.start_tracing()", "// // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law", "self._kernel_module_parameters += \" trace_method=1 sideband_log_method=1\" # Add more option to", "Intel Corporation // // Licensed under the Apache License, Version", "fetch sideband info Logger.start_tracing(self) time.sleep(0.2) Logger.stop_tracing(self) time.sleep(0.2) Logger.get_sideband_data(self) self.dump_kernel() self.dump_linux_gate()", "__init__(self, control): # Base class init call Logger.__init__(self, control) #", "help='Dump kernel and kernel modules for processing: 0=Off, 1=On(default)', required=False,", "2015 Intel Corporation // // Licensed under the Apache License,", "processing: 0=Off, 1=On(default)', required=False, default=0) self.args = self._parser.parse_args() self._kernel_module_parameters +=", "self._parser.add_argument('-p', '--panic', action='store', help='Panic tracing mode: 1=Normal, 2=Hooked(default)', required=False, default=2)", "copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // //", "kernel: 0=Off(default), 1=On', required=False, default=0) self._parser.add_argument('-d', '--dump', action='store', help='Dump kernel", "Logger.start_tracing(self) print \"\" print \"Panic tracing activated\" print \"If panic", "self._parser.add_argument('-k', '--kernel', action='store', help='Exclude kernel: 0=Off(default), 1=On', required=False, default=0) self._parser.add_argument('-d',", "self._kernel_module_parameters += \" panic_tracer=\" + str(self.args.panic) self._kernel_module_parameters += \" panic_sideband=\"", "yet # Quick Fix # Start tracing, wait 100ms, Stop", "0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-g', '--gbuffer', action='store', help='Dump trace data", "logger import Logger class PanicLogger(Logger): \"\"\" Panic logger \"\"\" def", "mode: 1=Normal, 2=Hooked(default)', required=False, default=2) self._parser.add_argument('-s', '--sideband', 
action='store', help='Panic tracing", "\"\" print \"Panic tracing activated\" print \"If panic happens, wait", "distributed on an \"AS IS\" BASIS, // WITHOUT WARRANTIES OR", "required=False, default=1) self._parser.add_argument('-u', '--userspace', action='store', help='Exclude user space: 0=Off, 1=On(default)',", "init call Logger.__init__(self, control) # Add default kernel module parameter", "\"\"\" def __init__(self, control): # Base class init call Logger.__init__(self,", "wait 10s and reboot device.\" print \"\" print \"When device", "action='store', help='Dump trace data to gbuffer: 0=Off, 1=On(default)', required=False, default=1)", "\"When device boot up run following command:\" print \"sat-panic-fetch \"", "self.get_build_info() # TODO Problem, there is no Sideband.bin info yet", "Base class init call Logger.__init__(self, control) # Add default kernel", "gbuffer: 0=Off, 1=On(default)', required=False, default=1) self._parser.add_argument('-u', '--userspace', action='store', help='Exclude user", "str(self.args.gbuffer) self._kernel_module_parameters += \" exclude_userspace=\" + str(self.args.userspace) self._kernel_module_parameters += \"", "+= \" panic_tracer=\" + str(self.args.panic) self._kernel_module_parameters += \" panic_sideband=\" +", "License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by", "required=False, default=0) self._parser.add_argument('-d', '--dump', action='store', help='Dump kernel and kernel modules", "did not get started\" def stop_tracing(self): return def get_data(self): return", "default=0) self._parser.add_argument('-d', '--dump', action='store', help='Dump kernel and kernel modules for", "from logger import Logger class PanicLogger(Logger): \"\"\" Panic logger \"\"\"", "class Logger.initialize(self) # Call start_tracing earlier to stop execution earlier", "Call start_tracing earlier to stop execution earlier self.start_tracing() def start_tracing(self):", "def start_tracing(self): 
self._debug_print(\"start_tracing\") trace_name, trace_path = self.get_trace_name(\"Enter <<trace name>> to", "the License. // You may obtain a copy of the", "print \"\" print \"When device boot up run following command:\"", "\"\"\" PanicLogger RAM-tracing \"\"\" import sys import time from logger", "law or agreed to in writing, software // distributed under" ]
[ "sys.argv[1] args = sys.argv[2:] if command == 'project': exe =", "os.getcwd() dirs = filesys.Directories() dirs.set_root(root) def main(): if len(sys.argv) <=", "Options: positional arguments: command project \"\"\" def project(args): if len(args)", "positional arguments: command project \"\"\" def project(args): if len(args) !=", "arguments.\") exit() if args[0] == 'init': root = os.getcwd() dirs", "len(args) != 1: print(\"error: Invalid arguments.\") exit() if args[0] ==", "command == 'project': exe = project else: print(\"error: No command", "xlab command ... Options: positional arguments: command project \"\"\" def", "!= 1: print(\"error: Invalid arguments.\") exit() if args[0] == 'init':", "print(MAIN_USAGE_MESSAGE) exit() command = sys.argv[1] args = sys.argv[2:] if command", "MAIN_USAGE_MESSAGE = \"\"\" usage: xlab command ... Options: positional arguments:", "\"\"\" usage: xlab command ... Options: positional arguments: command project", "project(args): if len(args) != 1: print(\"error: Invalid arguments.\") exit() if", "if args[0] == 'init': root = os.getcwd() dirs = filesys.Directories()", "project \"\"\" def project(args): if len(args) != 1: print(\"error: Invalid", "command project \"\"\" def project(args): if len(args) != 1: print(\"error:", "if len(args) != 1: print(\"error: Invalid arguments.\") exit() if args[0]", "from . import filesys MAIN_USAGE_MESSAGE = \"\"\" usage: xlab command", "filesys MAIN_USAGE_MESSAGE = \"\"\" usage: xlab command ... 
Options: positional", "Invalid arguments.\") exit() if args[0] == 'init': root = os.getcwd()", "= filesys.Directories() dirs.set_root(root) def main(): if len(sys.argv) <= 1: print(MAIN_USAGE_MESSAGE)", "1: print(MAIN_USAGE_MESSAGE) exit() command = sys.argv[1] args = sys.argv[2:] if", "= project else: print(\"error: No command 'xlab {}'.\".format(command)) exit() exe(args)", "filesys.Directories() dirs.set_root(root) def main(): if len(sys.argv) <= 1: print(MAIN_USAGE_MESSAGE) exit()", "dirs = filesys.Directories() dirs.set_root(root) def main(): if len(sys.argv) <= 1:", "\"\"\" def project(args): if len(args) != 1: print(\"error: Invalid arguments.\")", "if command == 'project': exe = project else: print(\"error: No", "dirs.set_root(root) def main(): if len(sys.argv) <= 1: print(MAIN_USAGE_MESSAGE) exit() command", "exe = project else: print(\"error: No command 'xlab {}'.\".format(command)) exit()", "args[0] == 'init': root = os.getcwd() dirs = filesys.Directories() dirs.set_root(root)", "'init': root = os.getcwd() dirs = filesys.Directories() dirs.set_root(root) def main():", "sys.argv[2:] if command == 'project': exe = project else: print(\"error:", "exit() if args[0] == 'init': root = os.getcwd() dirs =", "= sys.argv[2:] if command == 'project': exe = project else:", "1: print(\"error: Invalid arguments.\") exit() if args[0] == 'init': root", "= \"\"\" usage: xlab command ... Options: positional arguments: command", "args = sys.argv[2:] if command == 'project': exe = project", "print(\"error: Invalid arguments.\") exit() if args[0] == 'init': root =", "arguments: command project \"\"\" def project(args): if len(args) != 1:", "import filesys MAIN_USAGE_MESSAGE = \"\"\" usage: xlab command ... 
Options:", "= os.getcwd() dirs = filesys.Directories() dirs.set_root(root) def main(): if len(sys.argv)", "exit() command = sys.argv[1] args = sys.argv[2:] if command ==", "<= 1: print(MAIN_USAGE_MESSAGE) exit() command = sys.argv[1] args = sys.argv[2:]", "'project': exe = project else: print(\"error: No command 'xlab {}'.\".format(command))", "== 'project': exe = project else: print(\"error: No command 'xlab", "if len(sys.argv) <= 1: print(MAIN_USAGE_MESSAGE) exit() command = sys.argv[1] args", "def main(): if len(sys.argv) <= 1: print(MAIN_USAGE_MESSAGE) exit() command =", "def project(args): if len(args) != 1: print(\"error: Invalid arguments.\") exit()", "... Options: positional arguments: command project \"\"\" def project(args): if", "root = os.getcwd() dirs = filesys.Directories() dirs.set_root(root) def main(): if", "import os from . import filesys MAIN_USAGE_MESSAGE = \"\"\" usage:", "== 'init': root = os.getcwd() dirs = filesys.Directories() dirs.set_root(root) def", "command ... Options: positional arguments: command project \"\"\" def project(args):", "command = sys.argv[1] args = sys.argv[2:] if command == 'project':", "sys import os from . import filesys MAIN_USAGE_MESSAGE = \"\"\"", ". import filesys MAIN_USAGE_MESSAGE = \"\"\" usage: xlab command ...", "len(sys.argv) <= 1: print(MAIN_USAGE_MESSAGE) exit() command = sys.argv[1] args =", "os from . import filesys MAIN_USAGE_MESSAGE = \"\"\" usage: xlab", "= sys.argv[1] args = sys.argv[2:] if command == 'project': exe", "import sys import os from . import filesys MAIN_USAGE_MESSAGE =", "main(): if len(sys.argv) <= 1: print(MAIN_USAGE_MESSAGE) exit() command = sys.argv[1]", "usage: xlab command ... Options: positional arguments: command project \"\"\"" ]
[ "from ..fluid.dygraph import base as imperative_base from collections import Callable", "implemented based on the AdamW Optimization in paper `DECOUPLED WEIGHT", "2.0 (the \"License\"); # you may not use this file", "2021 PaddlePaddle Authors. All Rights Reserved. # # Licensed under", "data type as float32. The default value is 0.9. beta2", "of beta1, expect beta1 in [0,1).\") if not 0 <=", "if core.is_compiled_with_xpu() or core.is_compiled_with_npu(): raise NotImplementedError( \"'lr_ratio' is unimplemented in", "number or a Tensor with shape [1] and data type", "is None: # NOTE(wangxi): for pipeline to set device:all with", "is very large, then the update may be very slow.", "= paddle.mean(out) adam = paddle.optimizer.AdamW( learning_rate=0.1, parameters=[{ 'params': linear_1.parameters() },", "\\ different parameter groups such as the learning rate, weight", "there is no need for user to set this property.", "is false. name (str, optional): Normally there is no need", "been calculated, the result will be reused. # NOTE(wangxi): In", "= {\\beta}_1 * moment\\_1 + (1 - {\\beta}_1) * grad", "DECAY REGULARIZATION <https://arxiv.org/pdf/1711.05101.pdf>`_. it can resolves the problem of L2", "to different result. The default value is False. multi_precision (bool,", "that the learning_rate of linear_2 is 0.01. linear_1 = paddle.nn.Linear(10,", "in both dense mode and sparse mode. If the size", "static mode, at this time all parameters will be updated.", "Whether we should do weight decay for the parameter. with_decay", "optimizer is implemented based on the AdamW Optimization in paper", "} if isinstance(self._beta1, Variable): inputs['Beta1Tensor'] = self._beta1 else: attrs['beta1'] =", "be very slow. The lazy mode only update the element", "\"Moment1\": [moment1], \"Moment2\": [moment2], \"Beta1Pow\": [beta1_pow_acc], \"Beta2Pow\": [beta2_pow_acc], } #", "parameter is required in dygraph mode. 
And you can specify", "not isinstance(coeff, framework.Variable): raise TypeError(\"coeff should be float or Tensor.\")", "only update the element that has gradient in current mini-batch,", "1000, \"multi_precision\": find_master, \"with_decay\": with_decay, \"coeff\": self._coeff, \"lr_ratio\": 1. if", "Decay, params:\", \",\".join(self._params_name)]) def _update_param_group(self, parameters): self._coeff = parameters.get('coeff', self._default_dict['coeff'])", "the original Adam algorithm and may lead to different result.", "Variable) else self._beta2.numpy().item(0) _, _, _, _, _ = _C_ops.adamw(", "the License. from .optimizer import Optimizer from .adam import Adam", "framework from ..fluid.framework import Variable from ..fluid.dygraph import base as", "List/Tuple of ``Tensor`` names to update to minimize ``loss``. \\", "License for the specific language governing permissions and # limitations", "= self._get_accumulator(self._beta2_pow_acc_str, param_and_grad[0]) find_master = self._multi_precision and param_and_grad[ 0].dtype ==", "rate used to update ``Parameter``. It can be a float", "= master_weight * decay_coeff paddle.fluid.layers.assign( input=scaled_param, output=master_weight) else: scaled_param =", "is not None \\ and not self._apply_decay_param_fun(param.name): return if isinstance(self._learning_rate,", "be float or Tensor.\") self._params_name = set() self._apply_decay_param_fun = apply_decay_param_fun", "\"lazy_mode\": self._lazy_mode, \"min_row_size_to_use_multithread\": 1000, \"multi_precision\": find_master, \"with_decay\": with_decay, \"coeff\": self._coeff,", "Reserved. # # Licensed under the Apache License, Version 2.0", "'coeff', self._coeff, \"lr_ratio\", lr_ratio_) return None inputs = { \"Param\":", "multi_precision=multi_precision) self._default_dict = {'coeff': coeff} self.type = \"adamw\" if core.is_compiled_with_xpu():", "= coeff self._lr_to_coeff = dict() if lr_ratio is not None:", "rate for the 1st moment estimates. 
It should be a", "learning rate, weight decay, etc, \\ then the parameters are", "None in static mode, at this time all parameters will", "updating. Default is false. name (str, optional): Normally there is", "moment\\_1\\_out & = {\\beta}_1 * moment\\_1 + (1 - {\\beta}_1)", "coeff * lr Args: block: block in which variable is", "optional): A small float value for numerical stability. The default", "and not self._apply_decay_param_fun(param.name): return if isinstance(self._learning_rate, float): learning_rate = self._learning_rate", "0.01. linear_1 = paddle.nn.Linear(10, 10) linear_2 = paddle.nn.Linear(10, 10) inp", "assert beta1 is not None assert beta2 is not None", "= {'coeff': coeff} self.type = \"adamw\" if core.is_compiled_with_xpu(): self.type =", "instance of some derived class of ``GradientClipBase`` . There are", "sure _create_param_lr() be called after # optimizer._create_global_learning_rate(). learning_rate = self._create_param_lr(param_and_grad)", "attrs['epsilon'] = self._epsilon if find_master: inputs[\"MasterParam\"] = master_weight outputs[\"MasterParamOut\"] =", "return optimize_ops def __str__(self): return \" \".join([\"Weight Decay, params:\", \",\".join(self._params_name)])", "= paddle.mean(out) beta1 = paddle.to_tensor([0.9], dtype=\"float32\") beta2 = paddle.to_tensor([0.99], dtype=\"float32\")", "updated. beta1 (float|Tensor, optional): The exponential decay rate for the", "beta2=beta2, weight_decay=0.01) out.backward() adam.step() adam.clear_grad() #Note that the learning_rate of", "AdamW Optimization in paper `DECOUPLED WEIGHT DECAY REGULARIZATION <https://arxiv.org/pdf/1711.05101.pdf>`_. it", "r\"\"\" The AdamW optimizer is implemented based on the AdamW", "Args: block: block in which variable is to be created", "very large, then the update may be very slow. 
The", "}], weight_decay=0.01, beta1=0.9) out.backward() adam.step() adam.clear_grad() \"\"\" def __init__(self, learning_rate=0.001,", "* param) Args: learning_rate (float|LRScheduler, optional): The learning rate used", "specify tensors. Default: None. grad_clip (GradientClipBase, optional): Gradient cliping strategy,", "core from ..fluid import framework from ..fluid.framework import Variable from", "OF ANY KIND, either express or implied. # See the", "key): if key in self._auxiliary_vars: return self._auxiliary_vars[key] else: return None", "See the License for the specific language governing permissions and", "for \\ different parameter groups such as the learning rate,", "0.01. lr_ratio (function|None, optional): If it is not None, the", "to in writing, software # distributed under the License is", "updated with layerwise learning rate ratio. Otherwise, the learning rate", "the learning_rate in paramter groups \\ represents the scale of", "or agreed to in writing, software # distributed under the", "# If it has been calculated, the result will be", "param_and_grad): if paddle.is_compiled_with_xpu(): self._append_decoupled_weight_decay(block, param_and_grad) return super(AdamW, self)._append_optimize_op(block, param_and_grad) assert", "== core.VarDesc.VarType.FP16 master_weight = (self._master_weights[param_and_grad[0].name] if find_master else None) lr", "with paddle.static.device_guard(None): decay_coeff = 1.0 - learning_rate * self._coeff self._lr_to_coeff[learning_rate]", "it will be much more faster. But this mode has", "as the learning rate, weight decay, etc, \\ then the", "raise TypeError(\"coeff should be float or Tensor.\") self._params_name = set()", "created param_and_grad: (parameters, gradients) pairs, the parameters need to decay.", "and data type as float32. The default value is 0.999.", "compliance with the License. # You may obtain a copy", "All Rights Reserved. # # Licensed under the Apache License,", "(c) 2021 PaddlePaddle Authors. 
All Rights Reserved. # # Licensed", "The default value is None in static mode, at this", "with layerwise learning rate ratio. Otherwise, the learning rate is", "multi-precision during weight updating. Default is false. name (str, optional):", "= linear_2(out) loss = paddle.mean(out) adam = paddle.optimizer.AdamW( learning_rate=0.1, parameters=[{", "beta2_pow_acc, param_and_grad[0], moment1, moment2, beta1_pow_acc, beta2_pow_acc, 'epsilon', self._epsilon, 'lazy_mode', self._lazy_mode,", "type as float32. The default value is 0.9. beta2 (float|Tensor,", "# Use _auxiliary_vars together with _set_auxiliary_var/_get_auxiliary_var to achieve that. self._auxiliary_vars", "The default value is None. **Notes**: **Currently, AdamW doesn't support", "from collections import Callable import paddle _C_ops = core.ops __all__", "not use this file except in compliance with the License.", "} # Pass found_inf to adamw, to skip update for", "optional): If it is not None, the learning rate will", "param_and_grad: (parameters, gradients) pairs, the parameters need to decay. Raises:", "lr, moment1, moment2, beta1_pow_acc, beta2_pow_acc, param_and_grad[0], moment1, moment2, beta1_pow_acc, beta2_pow_acc,", "param_and_grad[0]) moment2 = self._get_accumulator(self._moment2_acc_str, param_and_grad[0]) beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str, param_and_grad[0]) beta2_pow_acc", "you may not use this file except in compliance with", "with shape [1] and data type as float32. The default", "Adam optimizer. .. math:: t & = t + 1", "learning_rate=learning_rate, parameters=parameters, beta1=beta1, beta2=beta2, epsilon=epsilon, grad_clip=grad_clip, name=name, lazy_mode=lazy_mode, multi_precision=multi_precision) self._default_dict", "to set this property. For more information, please refer to", "raise ValueError(\"Invaild value of beta2, expect beta2 in [0,1).\") if", "isinstance(self._learning_rate, float): learning_rate = self._learning_rate else: # NOTE. 
We add", "Examples: .. code-block:: python import paddle linear = paddle.nn.Linear(10, 10)", "1. if self._lr_ratio is None else self._lr_ratio(param_and_grad[0]) } if isinstance(self._beta1,", "not None, only tensors that makes apply_decay_param_fun(Tensor.name)==True will be updated", "dtype=\"float32\") adam = paddle.optimizer.AdamW(learning_rate=0.1, parameters=linear.parameters(), beta1=beta1, beta2=beta2, weight_decay=0.01) out.backward() adam.step()", "this function to the _append_optimize_op(), # for we must make", "is no need for user to set this property. For", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "type as float32. The default value is 0.999. epsilon (float,", "None, the learning rate will be updated with layerwise learning", "name (str, optional): Normally there is no need for user", "block.append_op( type=self.type, inputs=inputs, outputs=outputs, attrs=attrs, stop_gradient=True) return adamw_op def _create_optimization_pass(self,", "apply_decay_param_fun (function|None, optional): If it is not None, only tensors", "estimates. It should be a float number or a Tensor", "self._create_param_lr(param_and_grad) with block.program._optimized_guard( [param, grad]), framework.name_scope('weight decay'): self._params_name.add(param.name) # If", "optional): The official Adam algorithm has two moving-average accumulators. The", "(self._multi_precision and param.dtype == core.VarDesc.VarType.FP16) if find_master: master_weight = self._master_weights[param.name]", "default value is 1e-08. weight_decay (float|Tensor, optional): The weight decay", "need for user to set this property. For more information,", "and param_and_grad[ 0].dtype == core.VarDesc.VarType.FP16 master_weight = (self._master_weights[param_and_grad[0].name] if find_master", "you can specify different options for \\ different parameter groups", "optional): The learning rate used to update ``Parameter``. 
It can", "[] class AdamW(Adam): r\"\"\" The AdamW optimizer is implemented based", "property. For more information, please refer to :ref:`api_guide_Name`. The default", "achieve that. self._auxiliary_vars = dict() def _set_auxiliary_var(self, key, val): self._auxiliary_vars[key]", "{ \"lazy_mode\": self._lazy_mode, \"min_row_size_to_use_multithread\": 1000, \"multi_precision\": find_master, \"with_decay\": with_decay, \"coeff\":", "collections import Callable import paddle _C_ops = core.ops __all__ =", "with_decay, \"coeff\": self._coeff, \"lr_ratio\": 1. if self._lr_ratio is None else", "'beta1', _beta1, 'beta2', _beta2, 'coeff', self._coeff, \"lr_ratio\", lr_ratio_) return None", "called after # optimizer._create_global_learning_rate(). learning_rate = self._create_param_lr(param_and_grad) with block.program._optimized_guard( [param,", "(self._master_weights[param_and_grad[0].name] if find_master else None) lr = self._create_param_lr(param_and_grad) # create", "must make sure _create_param_lr() be called after # optimizer._create_global_learning_rate(). learning_rate", "we should do weight decay for the parameter. with_decay =", "<= beta1 < 1: raise ValueError(\"Invaild value of beta1, expect", "_beta1 = self._beta1 if not isinstance( self._beta1, Variable) else self._beta1.numpy().item(0)", "LRScheduler. The default value is 0.001. parameters (list|tuple, optional): List/Tuple", "float value or a LRScheduler. The default value is 0.001.", "<https://arxiv.org/pdf/1711.05101.pdf>`_. it can resolves the problem of L2 regularization failure", "result will be reused. # NOTE(wangxi): In dygraph mode, apply_gradient", "in dygraph mode. And you can specify different options for", "much more faster. But this mode has different semantics with", "this time all parameters will be updated. 
beta1 (float|Tensor, optional):", "beta2_pow_acc = self._get_accumulator(self._beta2_pow_acc_str, param_and_grad[0]) find_master = self._multi_precision and param_and_grad[ 0].dtype", "time all parameters will be updated. beta1 (float|Tensor, optional): The", "0].dtype == core.VarDesc.VarType.FP16 master_weight = (self._master_weights[param_and_grad[0].name] if find_master else None)", "\"ParamOut\": [param_and_grad[0]], \"Moment1Out\": [moment1], \"Moment2Out\": [moment2], \"Beta1PowOut\": [beta1_pow_acc], \"Beta2PowOut\": [beta2_pow_acc],", "learning_rate is not None assert beta1 is not None assert", "value of epsilon, expect epsilon >= 0.\") coeff = weight_decay", "under the License. from .optimizer import Optimizer from .adam import", "imperative_base from collections import Callable import paddle _C_ops = core.ops", "self._update_param_group(param_and_grad) param, grad = param_and_grad if self._apply_decay_param_fun is not None", "default value is None in static mode, at this time", "paddle.optimizer.AdamW( learning_rate=0.1, parameters=[{ 'params': linear_1.parameters() }, { 'params': linear_2.parameters(), 'weight_decay':", "__str__(self): return \" \".join([\"Weight Decay, params:\", \",\".join(self._params_name)]) def _update_param_group(self, parameters):", "NPU\") self._lr_ratio = lr_ratio super(AdamW, self).__init__( learning_rate=learning_rate, parameters=parameters, beta1=beta1, beta2=beta2,", "an instance of some derived class of ``GradientClipBase`` . There", "will be updated. 
beta1 (float|Tensor, optional): The exponential decay rate", "inp = paddle.uniform(shape=[10, 10], min=-0.1, max=0.1) out = linear_1(inp) out", "beta2 (float|Tensor, optional): The exponential decay rate for the 2nd", "raise ValueError(\"Invaild value of beta1, expect beta1 in [0,1).\") if", "from .adam import Adam from ..fluid import core from ..fluid", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "if isinstance(self._learning_rate, float): learning_rate = self._learning_rate else: # NOTE. We", "result. The default value is False. multi_precision (bool, optional): Whether", "is not None, only tensors that makes apply_decay_param_fun(Tensor.name)==True will be", "True if self._apply_decay_param_fun is not None \\ and not self._apply_decay_param_fun(param.name):", "self._auxiliary_vars[key] = val def _get_auxiliary_var(self, key): if key in self._auxiliary_vars:", "paddle.nn.Linear(10, 10) inp = paddle.uniform(shape=[10, 10], min=-0.1, max=0.1) out =", "self._create_param_lr(param_and_grad) # create the adamw optimize op if framework.in_dygraph_mode(): lr_ratio_", "{ \"ParamOut\": [param_and_grad[0]], \"Moment1Out\": [moment1], \"Moment2Out\": [moment2], \"Beta1PowOut\": [beta1_pow_acc], \"Beta2PowOut\":", "not isinstance( self._beta2, Variable) else self._beta2.numpy().item(0) _, _, _, _,", "file except in compliance with the License. # You may", "param) Args: learning_rate (float|LRScheduler, optional): The learning rate used to", "grad_clip=None, lazy_mode=False, multi_precision=False, name=None): assert learning_rate is not None assert", "beta_pow found_inf = self._get_auxiliary_var('found_inf') if found_inf: inputs['SkipUpdate'] = found_inf outputs", "Normally there is no need for user to set this", "decay rate for the 1st moment estimates. 
It should be", "beta2_pow_acc, 'epsilon', self._epsilon, 'lazy_mode', self._lazy_mode, 'min_row_size_to_use_multithread', 1000, 'beta1', _beta1, 'beta2',", "paddle.fluid.layers.assign(input=scaled_param, output=param) def _append_optimize_op(self, block, param_and_grad): if paddle.is_compiled_with_xpu(): self._append_decoupled_weight_decay(block, param_and_grad)", "- learning_rate * self._coeff self._lr_to_coeff[learning_rate] = decay_coeff find_master = (self._multi_precision", "Exception: The type of coeff and parameter is not consistent.", "= apply_decay_param_fun self._coeff = coeff self._lr_to_coeff = dict() if lr_ratio", "scaled_param = param * decay_coeff paddle.fluid.layers.assign(input=scaled_param, output=param) def _append_optimize_op(self, block,", "self._epsilon if find_master: inputs[\"MasterParam\"] = master_weight outputs[\"MasterParamOut\"] = master_weight adamw_op", "self._lr_ratio is None else self._lr_ratio(param_and_grad[0]) } if isinstance(self._beta1, Variable): inputs['Beta1Tensor']", "outputs = { \"ParamOut\": [param_and_grad[0]], \"Moment1Out\": [moment1], \"Moment2Out\": [moment2], \"Beta1PowOut\":", "stability. The default value is 1e-08. weight_decay (float|Tensor, optional): The", "the parameters are list of dict. Note that the learning_rate", "may be very slow. 
The lazy mode only update the", "\",\".join(self._params_name)]) def _update_param_group(self, parameters): self._coeff = parameters.get('coeff', self._default_dict['coeff']) parameters =", "self._apply_decay_param_fun(param.name): with_decay = False moment1 = self._get_accumulator(self._moment1_acc_str, param_and_grad[0]) moment2 =", "beta1_pow_acc, beta2_pow_acc, param_and_grad[0], moment1, moment2, beta1_pow_acc, beta2_pow_acc, 'epsilon', self._epsilon, 'lazy_mode',", "[param, grad]), framework.name_scope('weight decay'): self._params_name.add(param.name) # If it has been", "val): self._auxiliary_vars[key] = val def _get_auxiliary_var(self, key): if key in", "refer to :ref:`api_guide_Name`. The default value is None. **Notes**: **Currently,", "the Adam optimizer. .. math:: t & = t +", "is no gradient clipping. lazy_mode (bool, optional): The official Adam", "this in _create_optimization_pass decay_coeff = self._lr_to_coeff.get(learning_rate, None) if decay_coeff is", "isinstance( self._beta1, Variable) else self._beta1.numpy().item(0) _beta2 = self._beta2 if not", "= self._epsilon else: attrs['epsilon'] = self._epsilon if find_master: inputs[\"MasterParam\"] =", "KIND, either express or implied. # See the License for", "def _set_auxiliary_var(self, key, val): self._auxiliary_vars[key] = val def _get_auxiliary_var(self, key):", "self._epsilon, 'lazy_mode', self._lazy_mode, 'min_row_size_to_use_multithread', 1000, 'beta1', _beta1, 'beta2', _beta2, 'coeff',", "value is 0.001. parameters (list|tuple, optional): List/Tuple of ``Tensor`` names", "will be executed # every step, so need clear _lr_to_coeff", "algorithm has two moving-average accumulators. 
The accumulators are updated at", "value of beta2, expect beta2 in [0,1).\") if not 0", "raise NotImplementedError( \"'lr_ratio' is unimplemented in XPU and NPU\") self._lr_ratio", "parameter - parameter * coeff * lr Args: block: block", "beta1 < 1: raise ValueError(\"Invaild value of beta1, expect beta1", "(the \"License\"); # you may not use this file except", "Adam algorithm has two moving-average accumulators. The accumulators are updated", "want to specify tensors. Default: None. grad_clip (GradientClipBase, optional): Gradient", "the problem of L2 regularization failure in the Adam optimizer.", "default value is None. **Notes**: **Currently, AdamW doesn't support sparse", "\\ represents the scale of base learning_rate. \\ The default", "[0,1).\") if not 0 <= beta2 < 1: raise ValueError(\"Invaild", "\\ The default value is None in static mode, at", "= param * decay_coeff paddle.fluid.layers.assign(input=scaled_param, output=param) def _append_optimize_op(self, block, param_and_grad):", "learning rate will be updated with layerwise learning rate ratio.", "# # Unless required by applicable law or agreed to", "the scale of base learning_rate. \\ The default value is", "algorithm and may lead to different result. The default value", "= self._update_param_group(param_and_grad) param, grad = param_and_grad if self._apply_decay_param_fun is not", "self._get_accumulator(self._beta1_pow_acc_str, param_and_grad[0]) beta2_pow_acc = self._get_accumulator(self._beta2_pow_acc_str, param_and_grad[0]) find_master = self._multi_precision and", "optimize op if framework.in_dygraph_mode(): lr_ratio_ = 1. 
if self._lr_ratio is", "beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str, param_and_grad[0]) beta2_pow_acc = self._get_accumulator(self._beta2_pow_acc_str, param_and_grad[0]) find_master =", "(float|Tensor, optional): The exponential decay rate for the 2nd moment", "= core.ops __all__ = [] class AdamW(Adam): r\"\"\" The AdamW", "beta1=0.9) out.backward() adam.step() adam.clear_grad() \"\"\" def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999,", "value is 0.999. epsilon (float, optional): A small float value", "paddle.optimizer.AdamW(learning_rate=0.1, parameters=linear.parameters(), beta1=beta1, beta2=beta2, weight_decay=0.01) out.backward() adam.step() adam.clear_grad() #Note that", "= self._epsilon if find_master: inputs[\"MasterParam\"] = master_weight outputs[\"MasterParamOut\"] = master_weight", "implied. # See the License for the specific language governing", "linear_2(out) loss = paddle.mean(out) adam = paddle.optimizer.AdamW( learning_rate=0.1, parameters=[{ 'params':", "self._beta1 if not isinstance( self._beta1, Variable) else self._beta1.numpy().item(0) _beta2 =", "Optimization in paper `DECOUPLED WEIGHT DECAY REGULARIZATION <https://arxiv.org/pdf/1711.05101.pdf>`_. it can", "in which variable is to be created param_and_grad: (parameters, gradients)", "isinstance( self._beta2, Variable) else self._beta2.numpy().item(0) _, _, _, _, _", "to the _append_optimize_op(), # for we must make sure _create_param_lr()", "different result. The default value is False. 
multi_precision (bool, optional):", "\"Grad\": [param_and_grad[1]], \"LearningRate\": [lr], \"Moment1\": [moment1], \"Moment2\": [moment2], \"Beta1Pow\": [beta1_pow_acc],", "if decay_coeff is None: # NOTE(wangxi): for pipeline to set", "* lr Args: block: block in which variable is to", "create the adamw optimize op if framework.in_dygraph_mode(): lr_ratio_ = 1.", "= self._lr_to_coeff.get(learning_rate, None) if decay_coeff is None: # NOTE(wangxi): for", "learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8, parameters=None, weight_decay=0.01, lr_ratio=None, apply_decay_param_fun=None, grad_clip=None, lazy_mode=False,", "= 1. if self._lr_ratio is None else self._lr_ratio( param_and_grad[0]) _beta1", "apply_decay_param_fun=None, grad_clip=None, lazy_mode=False, multi_precision=False, name=None): assert learning_rate is not None", "is not None, the learning rate will be updated with", "the learning rate is the original. Default: None. apply_decay_param_fun (function|None,", "we want to specify tensors. Default: None. 
grad_clip (GradientClipBase, optional):", "parameters=None, weight_decay=0.01, lr_ratio=None, apply_decay_param_fun=None, grad_clip=None, lazy_mode=False, multi_precision=False, name=None): assert learning_rate", "self._apply_decay_param_fun is not None \\ and not self._apply_decay_param_fun(param.name): return if", "{\\beta}_1) * grad moemnt\\_2\\_out & = {\\beta}_2 * moment\\_2 +", "optional): The exponential decay rate for the 2nd moment estimates.", "linear_2 = paddle.nn.Linear(10, 10) inp = paddle.uniform(shape=[10, 10], min=-0.1, max=0.1)", "epsilon=epsilon, grad_clip=grad_clip, name=name, lazy_mode=lazy_mode, multi_precision=multi_precision) self._default_dict = {'coeff': coeff} self.type", "self).__init__( learning_rate=learning_rate, parameters=parameters, beta1=beta1, beta2=beta2, epsilon=epsilon, grad_clip=grad_clip, name=name, lazy_mode=lazy_mode, multi_precision=multi_precision)", "= dict() if lr_ratio is not None: assert isinstance(lr_ratio, Callable)", "Add decoupled weight decay op. 
parameter = parameter - parameter", "if isinstance(param_and_grad, dict): param_and_grad = self._update_param_group(param_and_grad) param, grad = param_and_grad", "learning_rate in paramter groups \\ represents the scale of base", "found_inf = self._get_auxiliary_var('found_inf') if found_inf: inputs['SkipUpdate'] = found_inf outputs =", "[beta1_pow_acc], \"Beta2Pow\": [beta2_pow_acc], } # Pass found_inf to adamw, to", "Unless required by applicable law or agreed to in writing,", "(1 - {\\beta}_2) * grad * grad learning\\_rate & =", "def _update_param_group(self, parameters): self._coeff = parameters.get('coeff', self._default_dict['coeff']) parameters = parameters.get('params')", "to adamw, to skip update for not only param, but", "inputs['Beta1Tensor'] = self._beta1 else: attrs['beta1'] = self._beta1 if isinstance(self._beta2, Variable):", "the specific language governing permissions and # limitations under the", "& = {\\beta}_1 * moment\\_1 + (1 - {\\beta}_1) *", "not self._apply_decay_param_fun(param.name): return if isinstance(self._learning_rate, float): learning_rate = self._learning_rate else:", "'params': linear_1.parameters() }, { 'params': linear_2.parameters(), 'weight_decay': 0.001, 'learning_rate': 0.1,", "is to be created param_and_grad: (parameters, gradients) pairs, the parameters", "gradient clipping. lazy_mode (bool, optional): The official Adam algorithm has", "self._params_name.add(param.name) # If it has been calculated, the result will", "multi_precision (bool, optional): Whether to use multi-precision during weight updating.", "of parameter is very large, then the update may be", "find_master = self._multi_precision and param_and_grad[ 0].dtype == core.VarDesc.VarType.FP16 master_weight =", "be much more faster. 
But this mode has different semantics", "If it is not None, only tensors that makes apply_decay_param_fun(Tensor.name)==True", "= t + 1 moment\\_1\\_out & = {\\beta}_1 * moment\\_1", "= (self._multi_precision and param.dtype == core.VarDesc.VarType.FP16) if find_master: master_weight =", "decay'): self._params_name.add(param.name) # If it has been calculated, the result", "self._lr_ratio = lr_ratio super(AdamW, self).__init__( learning_rate=learning_rate, parameters=parameters, beta1=beta1, beta2=beta2, epsilon=epsilon,", "= {\\beta}_2 * moment\\_2 + (1 - {\\beta}_2) * grad", "if not 0 <= epsilon: raise ValueError(\"Invaild value of epsilon,", "optional): The weight decay coefficient, it can be float or", "It should be a float number or a Tensor with", "else self._lr_ratio(param_and_grad[0]) } if isinstance(self._beta1, Variable): inputs['Beta1Tensor'] = self._beta1 else:", "output=master_weight) else: scaled_param = param * decay_coeff paddle.fluid.layers.assign(input=scaled_param, output=param) def", "self._get_auxiliary_var('found_inf') if found_inf: inputs['SkipUpdate'] = found_inf outputs = { \"ParamOut\":", "param_and_grad # Whether we should do weight decay for the", "be created param_and_grad: (parameters, gradients) pairs, the parameters need to", "then the update may be very slow. 
The lazy mode", "learning\\_rate * (\\frac{moment\\_1}{\\sqrt{moment\\_2} + \\epsilon} + \\lambda * param) Args:", "python import paddle linear = paddle.nn.Linear(10, 10) inp = paddle.rand([10,10],", "value is None in static mode, at this time all", "adam.clear_grad() \"\"\" def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8, parameters=None, weight_decay=0.01,", "* \\frac{\\sqrt{1 - {\\beta}_2^t}}{1 - {beta}_1^t} param\\_out & = param", "strategy, it's an instance of some derived class of ``GradientClipBase``", "lazy_mode (bool, optional): The official Adam algorithm has two moving-average", "is not None assert beta2 is not None assert epsilon", "the parameters need to decay. Raises: Exception: The type of", "should do weight decay for the parameter. with_decay = True", "problem of L2 regularization failure in the Adam optimizer. ..", "isinstance(coeff, float) and \\ not isinstance(coeff, framework.Variable): raise TypeError(\"coeff should", "= dict() def _set_auxiliary_var(self, key, val): self._auxiliary_vars[key] = val def", "return \" \".join([\"Weight Decay, params:\", \",\".join(self._params_name)]) def _update_param_group(self, parameters): self._coeff", "adam.step() adam.clear_grad() \"\"\" def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8, parameters=None,", "import Adam from ..fluid import core from ..fluid import framework", "parameters will be updated. beta1 (float|Tensor, optional): The exponential decay", "<= epsilon: raise ValueError(\"Invaild value of epsilon, expect epsilon >=", "adamw_op def _create_optimization_pass(self, parameters_and_grads): optimize_ops = super( AdamW, self)._create_optimization_pass(parameters_and_grads) #", "use multi-precision during weight updating. Default is false. 
name (str,", "isinstance(block, framework.Block) if isinstance(param_and_grad, dict): param_and_grad = self._update_param_group(param_and_grad) param, grad", "doesn't support sparse parameter optimization.** Examples: .. code-block:: python import", "self._lr_to_coeff = dict() if lr_ratio is not None: assert isinstance(lr_ratio,", "specify different options for \\ different parameter groups such as", "mode has different semantics with the original Adam algorithm and", "\"Beta1PowOut\": [beta1_pow_acc], \"Beta2PowOut\": [beta2_pow_acc], } attrs = { \"lazy_mode\": self._lazy_mode,", "adamw_op = block.append_op( type=self.type, inputs=inputs, outputs=outputs, attrs=attrs, stop_gradient=True) return adamw_op", "two moving-average is updated in both dense mode and sparse", "attrs = { \"lazy_mode\": self._lazy_mode, \"min_row_size_to_use_multithread\": 1000, \"multi_precision\": find_master, \"with_decay\":", "0 <= epsilon: raise ValueError(\"Invaild value of epsilon, expect epsilon", "# optimizer._create_global_learning_rate(). learning_rate = self._create_param_lr(param_and_grad) with block.program._optimized_guard( [param, grad]), framework.name_scope('weight", "param * decay_coeff paddle.fluid.layers.assign(input=scaled_param, output=param) def _append_optimize_op(self, block, param_and_grad): if", "pipeline to set device:all with paddle.static.device_guard(None): decay_coeff = 1.0 -", "You may obtain a copy of the License at #", "'beta1': 0.8 }], weight_decay=0.01, beta1=0.9) out.backward() adam.step() adam.clear_grad() \"\"\" def", "else: scaled_param = param * decay_coeff paddle.fluid.layers.assign(input=scaled_param, output=param) def _append_optimize_op(self,", "names to update to minimize ``loss``. \\ This parameter is", "And you can specify different options for \\ different parameter", "= param - learning\\_rate * (\\frac{moment\\_1}{\\sqrt{moment\\_2} + \\epsilon} + \\lambda", "represents the scale of base learning_rate. 
\\ The default value", "None, only tensors that makes apply_decay_param_fun(Tensor.name)==True will be updated with", "dict. Note that the learning_rate in paramter groups \\ represents", "if core.is_compiled_with_xpu(): self.type = \"adam\" # Use _auxiliary_vars together with", "gradients) pairs, the parameters need to decay. Raises: Exception: The", "self._beta1 if isinstance(self._beta2, Variable): inputs['Beta2Tensor'] = self._beta2 else: attrs['beta2'] =", "float value for numerical stability. The default value is 1e-08.", "# NOTE. We add this function to the _append_optimize_op(), #", "be updated with weight decay. It only works when we", "* moment\\_2 + (1 - {\\beta}_2) * grad * grad", "\"\"\" def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8, parameters=None, weight_decay=0.01, lr_ratio=None,", "\"coeff\": self._coeff, \"lr_ratio\": 1. if self._lr_ratio is None else self._lr_ratio(param_and_grad[0])", "= self._create_param_lr(param_and_grad) # create the adamw optimize op if framework.in_dygraph_mode():", "# create the adamw optimize op if framework.in_dygraph_mode(): lr_ratio_ =", "value for numerical stability. The default value is 1e-08. weight_decay", "< 1: raise ValueError(\"Invaild value of beta1, expect beta1 in", "may lead to different result. The default value is False.", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "'min_row_size_to_use_multithread', 1000, 'beta1', _beta1, 'beta2', _beta2, 'coeff', self._coeff, \"lr_ratio\", lr_ratio_)", "is None in static mode, at this time all parameters", "param_and_grad[0]) find_master = self._multi_precision and param_and_grad[ 0].dtype == core.VarDesc.VarType.FP16 master_weight", "weight updating. Default is false. name (str, optional): Normally there", "language governing permissions and # limitations under the License. 
from", "step, so need clear _lr_to_coeff every step, # we do", "different options for \\ different parameter groups such as the", "moment1, moment2, beta1_pow_acc, beta2_pow_acc, param_and_grad[0], moment1, moment2, beta1_pow_acc, beta2_pow_acc, 'epsilon',", "self._apply_decay_param_fun(param.name): return if isinstance(self._learning_rate, float): learning_rate = self._learning_rate else: #", "'params': linear_2.parameters(), 'weight_decay': 0.001, 'learning_rate': 0.1, 'beta1': 0.8 }], weight_decay=0.01,", "coeff and parameter is not consistent. \"\"\" if isinstance(param_and_grad, dict):", "NotImplementedError( \"'lr_ratio' is unimplemented in XPU and NPU\") self._lr_ratio =", "self._apply_decay_param_fun = apply_decay_param_fun self._coeff = coeff self._lr_to_coeff = dict() if", "Tensor.\") self._params_name = set() self._apply_decay_param_fun = apply_decay_param_fun self._coeff = coeff", "grad]), framework.name_scope('weight decay'): self._params_name.add(param.name) # If it has been calculated,", "{ 'params': linear_2.parameters(), 'weight_decay': 0.001, 'learning_rate': 0.1, 'beta1': 0.8 }],", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "License. # You may obtain a copy of the License", "mode, clear _lr_to_coeff after applied gradient self._lr_to_coeff = dict() return", "if not isinstance( self._beta1, Variable) else self._beta1.numpy().item(0) _beta2 = self._beta2", "from ..fluid.framework import Variable from ..fluid.dygraph import base as imperative_base", "element of the two moving-average is updated in both dense", "is not None assert beta1 is not None assert beta2", "Default: None. apply_decay_param_fun (function|None, optional): If it is not None,", "_, _, _, _, _ = _C_ops.adamw( param_and_grad[0], param_and_grad[1], lr,", "epsilon, expect epsilon >= 0.\") coeff = weight_decay if not", "of L2 regularization failure in the Adam optimizer. .. math::", "information, please refer to :ref:`api_guide_Name`. 
The default value is None.", "can be a float value or a LRScheduler. The default", "not None \\ and not self._apply_decay_param_fun(param.name): return if isinstance(self._learning_rate, float):", "adamw optimize op if framework.in_dygraph_mode(): lr_ratio_ = 1. if self._lr_ratio", "not None assert beta1 is not None assert beta2 is", "find_master else None) lr = self._create_param_lr(param_and_grad) # create the adamw", "(bool, optional): Whether to use multi-precision during weight updating. Default", "param_and_grad[0]) _beta1 = self._beta1 if not isinstance( self._beta1, Variable) else", "\"Beta1Pow\": [beta1_pow_acc], \"Beta2Pow\": [beta2_pow_acc], } # Pass found_inf to adamw,", "if not 0 <= beta2 < 1: raise ValueError(\"Invaild value", "There are three cliping strategies ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` ,", "and data type as float32. The default value is 0.9.", "``loss``. \\ This parameter is required in dygraph mode. And", "is not None if not 0 <= beta1 < 1:", "decay coefficient, it can be float or Tensor. The default", "core.ops __all__ = [] class AdamW(Adam): r\"\"\" The AdamW optimizer", "isinstance(self._beta2, Variable): inputs['Beta2Tensor'] = self._beta2 else: attrs['beta2'] = self._beta2 if", "In dygraph mode, apply_gradient will be executed # every step,", "mode and sparse mode. If the size of parameter is", "float32. The default value is 0.9. beta2 (float|Tensor, optional): The", "has two moving-average accumulators. The accumulators are updated at every", "+ \\epsilon} + \\lambda * param) Args: learning_rate (float|LRScheduler, optional):", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "ValueError(\"Invaild value of beta1, expect beta1 in [0,1).\") if not", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "dygraph mode. 
And you can specify different options for \\", "be float or Tensor. The default value is 0.01. lr_ratio", "We add this function to the _append_optimize_op(), # for we", "lazy mode only update the element that has gradient in", "rate for the 2nd moment estimates. It should be a", "core.is_compiled_with_npu(): raise NotImplementedError( \"'lr_ratio' is unimplemented in XPU and NPU\")", "required by applicable law or agreed to in writing, software", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "In dygraph mode, clear _lr_to_coeff after applied gradient self._lr_to_coeff =", "# we do this in _create_optimization_pass decay_coeff = self._lr_to_coeff.get(learning_rate, None)", ":ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there is no gradient clipping.", "param_and_grad[ 0].dtype == core.VarDesc.VarType.FP16 master_weight = (self._master_weights[param_and_grad[0].name] if find_master else", "Raises: Exception: The type of coeff and parameter is not", "License. from .optimizer import Optimizer from .adam import Adam from", "beta2 < 1: raise ValueError(\"Invaild value of beta2, expect beta2", "[moment1], \"Moment2\": [moment2], \"Beta1Pow\": [beta1_pow_acc], \"Beta2Pow\": [beta2_pow_acc], } # Pass", "agreed to in writing, software # distributed under the License", "(GradientClipBase, optional): Gradient cliping strategy, it's an instance of some", "distributed under the License is distributed on an \"AS IS\"", "moment2, beta1_pow_acc, beta2_pow_acc, 'epsilon', self._epsilon, 'lazy_mode', self._lazy_mode, 'min_row_size_to_use_multithread', 1000, 'beta1',", "self._coeff = coeff self._lr_to_coeff = dict() if lr_ratio is not", "after applied gradient self._lr_to_coeff = dict() return optimize_ops def __str__(self):", "Variable) else self._beta1.numpy().item(0) _beta2 = self._beta2 if not isinstance( self._beta2,", "of dict. 
Note that the learning_rate in paramter groups \\", ":ref:`api_fluid_clip_GradientClipByNorm` , :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there is no", "}, { 'params': linear_2.parameters(), 'weight_decay': 0.001, 'learning_rate': 0.1, 'beta1': 0.8", "1000, 'beta1', _beta1, 'beta2', _beta2, 'coeff', self._coeff, \"lr_ratio\", lr_ratio_) return", "if self._apply_decay_param_fun is not None \\ and not self._apply_decay_param_fun(param.name): return", "executed # every step, so need clear _lr_to_coeff every step,", "coeff self._lr_to_coeff = dict() if lr_ratio is not None: assert", "default value is 0.999. epsilon (float, optional): A small float", "``Parameter``. It can be a float value or a LRScheduler.", "block.program._optimized_guard( [param, grad]), framework.name_scope('weight decay'): self._params_name.add(param.name) # If it has", "as imperative_base from collections import Callable import paddle _C_ops =", "\"Moment2\": [moment2], \"Beta1Pow\": [beta1_pow_acc], \"Beta2Pow\": [beta2_pow_acc], } # Pass found_inf", "parameters_and_grads): optimize_ops = super( AdamW, self)._create_optimization_pass(parameters_and_grads) # In dygraph mode,", "# NOTE(wangxi): for pipeline to set device:all with paddle.static.device_guard(None): decay_coeff", "paddle.rand([10,10], dtype=\"float32\") out = linear(inp) loss = paddle.mean(out) beta1 =", "value of beta1, expect beta1 in [0,1).\") if not 0", "rate, weight decay, etc, \\ then the parameters are list", "_beta1, 'beta2', _beta2, 'coeff', self._coeff, \"lr_ratio\", lr_ratio_) return None inputs", "\"Beta2Pow\": [beta2_pow_acc], } # Pass found_inf to adamw, to skip", "found_inf outputs = { \"ParamOut\": [param_and_grad[0]], \"Moment1Out\": [moment1], \"Moment2Out\": [moment2],", "inputs[\"MasterParam\"] = master_weight outputs[\"MasterParamOut\"] = master_weight adamw_op = block.append_op( type=self.type,", "param_and_grad[0]) beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str, 
param_and_grad[0]) beta2_pow_acc = self._get_accumulator(self._beta2_pow_acc_str, param_and_grad[0]) find_master", "every step, # we do this in _create_optimization_pass decay_coeff =", "The lazy mode only update the element that has gradient", "OR CONDITIONS OF ANY KIND, either express or implied. #", "the License is distributed on an \"AS IS\" BASIS, #", "it's an instance of some derived class of ``GradientClipBase`` .", "dtype=\"float32\") beta2 = paddle.to_tensor([0.99], dtype=\"float32\") adam = paddle.optimizer.AdamW(learning_rate=0.1, parameters=linear.parameters(), beta1=beta1,", "self._beta2 if not isinstance( self._beta2, Variable) else self._beta2.numpy().item(0) _, _,", "apply_gradient will be executed # every step, so need clear", "[beta2_pow_acc], } # Pass found_inf to adamw, to skip update", "[moment2], \"Beta1PowOut\": [beta1_pow_acc], \"Beta2PowOut\": [beta2_pow_acc], } attrs = { \"lazy_mode\":", "dict() def _set_auxiliary_var(self, key, val): self._auxiliary_vars[key] = val def _get_auxiliary_var(self,", "= self._get_auxiliary_var('found_inf') if found_inf: inputs['SkipUpdate'] = found_inf outputs = {", "size of parameter is very large, then the update may", "law or agreed to in writing, software # distributed under", "self._learning_rate else: # NOTE. We add this function to the", "semantics with the original Adam algorithm and may lead to", "else self._lr_ratio( param_and_grad[0]) _beta1 = self._beta1 if not isinstance( self._beta1,", "to update to minimize ``loss``. 
\\ This parameter is required", "= paddle.nn.Linear(10, 10) linear_2 = paddle.nn.Linear(10, 10) inp = paddle.uniform(shape=[10,", "optional): The exponential decay rate for the 1st moment estimates.", "* grad learning\\_rate & = learning\\_rate * \\frac{\\sqrt{1 - {\\beta}_2^t}}{1", "optional): If it is not None, only tensors that makes", "return if isinstance(self._learning_rate, float): learning_rate = self._learning_rate else: # NOTE.", "attrs['beta1'] = self._beta1 if isinstance(self._beta2, Variable): inputs['Beta2Tensor'] = self._beta2 else:", "small float value for numerical stability. The default value is", "may obtain a copy of the License at # #", ">= 0.\") coeff = weight_decay if not isinstance(coeff, float) and", "raise ValueError(\"Invaild value of epsilon, expect epsilon >= 0.\") coeff", "_, _, _, _ = _C_ops.adamw( param_and_grad[0], param_and_grad[1], lr, moment1,", "return adamw_op def _create_optimization_pass(self, parameters_and_grads): optimize_ops = super( AdamW, self)._create_optimization_pass(parameters_and_grads)", "TypeError(\"coeff should be float or Tensor.\") self._params_name = set() self._apply_decay_param_fun", "Optimizer from .adam import Adam from ..fluid import core from", "__all__ = [] class AdamW(Adam): r\"\"\" The AdamW optimizer is", "very slow. 
The lazy mode only update the element that", "# NOTE(wangxi): In dygraph mode, apply_gradient will be executed #", "if key in self._auxiliary_vars: return self._auxiliary_vars[key] else: return None def", "to set device:all with paddle.static.device_guard(None): decay_coeff = 1.0 - learning_rate", "may not use this file except in compliance with the", "return self._auxiliary_vars[key] else: return None def _append_decoupled_weight_decay(self, block, param_and_grad): \"\"\"", "linear_2.parameters(), 'weight_decay': 0.001, 'learning_rate': 0.1, 'beta1': 0.8 }], weight_decay=0.01, beta1=0.9)", "isinstance(lr_ratio, Callable) if core.is_compiled_with_xpu() or core.is_compiled_with_npu(): raise NotImplementedError( \"'lr_ratio' is", "beta1=beta1, beta2=beta2, weight_decay=0.01) out.backward() adam.step() adam.clear_grad() #Note that the learning_rate", "value is None. **Notes**: **Currently, AdamW doesn't support sparse parameter", "# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. #", "attrs=attrs, stop_gradient=True) return adamw_op def _create_optimization_pass(self, parameters_and_grads): optimize_ops = super(", "this file except in compliance with the License. # You", "of ``Tensor`` names to update to minimize ``loss``. \\ This", "grad_clip (GradientClipBase, optional): Gradient cliping strategy, it's an instance of", "not None, the learning rate will be updated with layerwise", "loss = paddle.mean(out) beta1 = paddle.to_tensor([0.9], dtype=\"float32\") beta2 = paddle.to_tensor([0.99],", "= self._beta2 else: attrs['beta2'] = self._beta2 if isinstance(self._epsilon, Variable): inputs['EpsilonTensor']", "block: block in which variable is to be created param_and_grad:", "at this time all parameters will be updated. 
beta1 (float|Tensor,", "# # Licensed under the Apache License, Version 2.0 (the", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "decay_coeff paddle.fluid.layers.assign( input=scaled_param, output=master_weight) else: scaled_param = param * decay_coeff", "linear_1.parameters() }, { 'params': linear_2.parameters(), 'weight_decay': 0.001, 'learning_rate': 0.1, 'beta1':", "dense mode and sparse mode. If the size of parameter", "parameters are list of dict. Note that the learning_rate in", "works when we want to specify tensors. Default: None. grad_clip", "a float value or a LRScheduler. The default value is", "Pass found_inf to adamw, to skip update for not only", "has different semantics with the original Adam algorithm and may", "parameter. with_decay = True if self._apply_decay_param_fun is not None \\", "the parameter. with_decay = True if self._apply_decay_param_fun is not None", "weight decay for the parameter. with_decay = True if self._apply_decay_param_fun", "current mini-batch, so it will be much more faster. But", "also momentum and beta_pow found_inf = self._get_auxiliary_var('found_inf') if found_inf: inputs['SkipUpdate']", "param_and_grad if self._apply_decay_param_fun is not None \\ and not self._apply_decay_param_fun(param.name):", "_create_param_lr() be called after # optimizer._create_global_learning_rate(). learning_rate = self._create_param_lr(param_and_grad) with", "pairs, the parameters need to decay. Raises: Exception: The type", "be called after # optimizer._create_global_learning_rate(). learning_rate = self._create_param_lr(param_and_grad) with block.program._optimized_guard(", "[1] and data type as float32. The default value is", "self.type = \"adam\" # Use _auxiliary_vars together with _set_auxiliary_var/_get_auxiliary_var to", "& = param - learning\\_rate * (\\frac{moment\\_1}{\\sqrt{moment\\_2} + \\epsilon} +", "governing permissions and # limitations under the License. 
from .optimizer", "from .optimizer import Optimizer from .adam import Adam from ..fluid", "AdamW doesn't support sparse parameter optimization.** Examples: .. code-block:: python", "or implied. # See the License for the specific language", "``GradientClipBase`` . There are three cliping strategies ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` ,", "it is not None, the learning rate will be updated", "Tensor with shape [1] and data type as float32. The", ".. code-block:: python import paddle linear = paddle.nn.Linear(10, 10) inp", "default value is False. multi_precision (bool, optional): Whether to use", "adam = paddle.optimizer.AdamW( learning_rate=0.1, parameters=[{ 'params': linear_1.parameters() }, { 'params':", "is 0.01. linear_1 = paddle.nn.Linear(10, 10) linear_2 = paddle.nn.Linear(10, 10)", "if not isinstance(coeff, float) and \\ not isinstance(coeff, framework.Variable): raise", "isinstance(param_and_grad, dict): param_and_grad = self._update_param_group(param_and_grad) param, grad = param_and_grad if", "None else self._lr_ratio(param_and_grad[0]) } if isinstance(self._beta1, Variable): inputs['Beta1Tensor'] = self._beta1", "t & = t + 1 moment\\_1\\_out & = {\\beta}_1", "= found_inf outputs = { \"ParamOut\": [param_and_grad[0]], \"Moment1Out\": [moment1], \"Moment2Out\":", "the original. Default: None. apply_decay_param_fun (function|None, optional): If it is", "in [0,1).\") if not 0 <= epsilon: raise ValueError(\"Invaild value", "else self._beta1.numpy().item(0) _beta2 = self._beta2 if not isinstance( self._beta2, Variable)", "1e-08. weight_decay (float|Tensor, optional): The weight decay coefficient, it can", "momentum and beta_pow found_inf = self._get_auxiliary_var('found_inf') if found_inf: inputs['SkipUpdate'] =", "learning_rate of linear_2 is 0.01. 
linear_1 = paddle.nn.Linear(10, 10) linear_2", "NOTE(wangxi): for pipeline to set device:all with paddle.static.device_guard(None): decay_coeff =", "* moment\\_1 + (1 - {\\beta}_1) * grad moemnt\\_2\\_out &", "master_weight = self._master_weights[param.name] scaled_param = master_weight * decay_coeff paddle.fluid.layers.assign( input=scaled_param,", "block in which variable is to be created param_and_grad: (parameters,", "(\\frac{moment\\_1}{\\sqrt{moment\\_2} + \\epsilon} + \\lambda * param) Args: learning_rate (float|LRScheduler,", "_set_auxiliary_var(self, key, val): self._auxiliary_vars[key] = val def _get_auxiliary_var(self, key): if", "self.type = \"adamw\" if core.is_compiled_with_xpu(): self.type = \"adam\" # Use", "parameter is very large, then the update may be very", "learning\\_rate & = learning\\_rate * \\frac{\\sqrt{1 - {\\beta}_2^t}}{1 - {beta}_1^t}", "beta2 = paddle.to_tensor([0.99], dtype=\"float32\") adam = paddle.optimizer.AdamW(learning_rate=0.1, parameters=linear.parameters(), beta1=beta1, beta2=beta2,", "different parameter groups such as the learning rate, weight decay,", "type=self.type, inputs=inputs, outputs=outputs, attrs=attrs, stop_gradient=True) return adamw_op def _create_optimization_pass(self, parameters_and_grads):", "expect beta1 in [0,1).\") if not 0 <= beta2 <", "during weight updating. Default is false. name (str, optional): Normally", "1: raise ValueError(\"Invaild value of beta2, expect beta2 in [0,1).\")", "optional): List/Tuple of ``Tensor`` names to update to minimize ``loss``.", "decay_coeff = 1.0 - learning_rate * self._coeff self._lr_to_coeff[learning_rate] = decay_coeff", "- parameter * coeff * lr Args: block: block in", "2nd moment estimates. It should be a float number or", "and # limitations under the License. 
from .optimizer import Optimizer", "applied gradient self._lr_to_coeff = dict() return optimize_ops def __str__(self): return", "<= beta2 < 1: raise ValueError(\"Invaild value of beta2, expect", "tensors that makes apply_decay_param_fun(Tensor.name)==True will be updated with weight decay.", "self._beta1, Variable) else self._beta1.numpy().item(0) _beta2 = self._beta2 if not isinstance(", "self._lr_to_coeff = dict() return optimize_ops def __str__(self): return \" \".join([\"Weight", "lazy_mode=False, multi_precision=False, name=None): assert learning_rate is not None assert beta1", "adam = paddle.optimizer.AdamW(learning_rate=0.1, parameters=linear.parameters(), beta1=beta1, beta2=beta2, weight_decay=0.01) out.backward() adam.step() adam.clear_grad()", "{\\beta}_2^t}}{1 - {beta}_1^t} param\\_out & = param - learning\\_rate *", "function to the _append_optimize_op(), # for we must make sure", "learning rate is the original. Default: None. apply_decay_param_fun (function|None, optional):", "(1 - {\\beta}_1) * grad moemnt\\_2\\_out & = {\\beta}_2 *", "the 2nd moment estimates. It should be a float number", "no need for user to set this property. For more", "self._coeff, \"lr_ratio\", lr_ratio_) return None inputs = { \"Param\": [param_and_grad[0]],", "can specify different options for \\ different parameter groups such", "= [] class AdamW(Adam): r\"\"\" The AdamW optimizer is implemented", "\"Moment1Out\": [moment1], \"Moment2Out\": [moment2], \"Beta1PowOut\": [beta1_pow_acc], \"Beta2PowOut\": [beta2_pow_acc], } attrs", "* self._coeff self._lr_to_coeff[learning_rate] = decay_coeff find_master = (self._multi_precision and param.dtype", "0.\") coeff = weight_decay if not isinstance(coeff, float) and \\", "used to update ``Parameter``. 
It can be a float value", "= self._beta2 if isinstance(self._epsilon, Variable): inputs['EpsilonTensor'] = self._epsilon else: attrs['epsilon']", "should be float or Tensor.\") self._params_name = set() self._apply_decay_param_fun =", "if self._apply_decay_param_fun is not None \\ and not self._apply_decay_param_fun(param.name): with_decay", "based on the AdamW Optimization in paper `DECOUPLED WEIGHT DECAY", "in _create_optimization_pass decay_coeff = self._lr_to_coeff.get(learning_rate, None) if decay_coeff is None:", "moving-average is updated in both dense mode and sparse mode.", "only works when we want to specify tensors. Default: None.", "moment2 = self._get_accumulator(self._moment2_acc_str, param_and_grad[0]) beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str, param_and_grad[0]) beta2_pow_acc =", "with block.program._optimized_guard( [param, grad]), framework.name_scope('weight decay'): self._params_name.add(param.name) # If it", "optimize_ops def __str__(self): return \" \".join([\"Weight Decay, params:\", \",\".join(self._params_name)]) def", "gradient in current mini-batch, so it will be much more", "that the learning_rate in paramter groups \\ represents the scale", "the two moving-average is updated in both dense mode and", "'learning_rate': 0.1, 'beta1': 0.8 }], weight_decay=0.01, beta1=0.9) out.backward() adam.step() adam.clear_grad()", "self._auxiliary_vars = dict() def _set_auxiliary_var(self, key, val): self._auxiliary_vars[key] = val", "_get_auxiliary_var(self, key): if key in self._auxiliary_vars: return self._auxiliary_vars[key] else: return", "master_weight outputs[\"MasterParamOut\"] = master_weight adamw_op = block.append_op( type=self.type, inputs=inputs, outputs=outputs,", "of epsilon, expect epsilon >= 0.\") coeff = weight_decay if", "cliping strategies ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` , :ref:`api_fluid_clip_GradientClipByValue` ). 
Default", "parameters): self._coeff = parameters.get('coeff', self._default_dict['coeff']) parameters = parameters.get('params') return parameters", "to update ``Parameter``. It can be a float value or", "val def _get_auxiliary_var(self, key): if key in self._auxiliary_vars: return self._auxiliary_vars[key]", "( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` , :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning", "not None if not 0 <= beta1 < 1: raise", "_lr_to_coeff every step, # we do this in _create_optimization_pass decay_coeff", "shape [1] and data type as float32. The default value", "for not only param, but also momentum and beta_pow found_inf", "in writing, software # distributed under the License is distributed", "= paddle.uniform(shape=[10, 10], min=-0.1, max=0.1) out = linear_1(inp) out =", "paddle linear = paddle.nn.Linear(10, 10) inp = paddle.rand([10,10], dtype=\"float32\") out", "self._get_accumulator(self._beta2_pow_acc_str, param_and_grad[0]) find_master = self._multi_precision and param_and_grad[ 0].dtype == core.VarDesc.VarType.FP16", "[param_and_grad[0]], \"Moment1Out\": [moment1], \"Moment2Out\": [moment2], \"Beta1PowOut\": [beta1_pow_acc], \"Beta2PowOut\": [beta2_pow_acc], }", "). Default None, meaning there is no gradient clipping. lazy_mode", "and may lead to different result. The default value is", "multi_precision=False, name=None): assert learning_rate is not None assert beta1 is", "param_and_grad): \"\"\" Add decoupled weight decay op. parameter = parameter", "= self._get_accumulator(self._moment1_acc_str, param_and_grad[0]) moment2 = self._get_accumulator(self._moment2_acc_str, param_and_grad[0]) beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str,", "not 0 <= beta2 < 1: raise ValueError(\"Invaild value of", "\"LearningRate\": [lr], \"Moment1\": [moment1], \"Moment2\": [moment2], \"Beta1Pow\": [beta1_pow_acc], \"Beta2Pow\": [beta2_pow_acc],", "tensors. Default: None. 
grad_clip (GradientClipBase, optional): Gradient cliping strategy, it's", "the element that has gradient in current mini-batch, so it", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "Adam algorithm and may lead to different result. The default", "updated at every step. Every element of the two moving-average", "of beta2, expect beta2 in [0,1).\") if not 0 <=", "0.1, 'beta1': 0.8 }], weight_decay=0.01, beta1=0.9) out.backward() adam.step() adam.clear_grad() \"\"\"", "master_weight * decay_coeff paddle.fluid.layers.assign( input=scaled_param, output=master_weight) else: scaled_param = param", "[moment1], \"Moment2Out\": [moment2], \"Beta1PowOut\": [beta1_pow_acc], \"Beta2PowOut\": [beta2_pow_acc], } attrs =", "we must make sure _create_param_lr() be called after # optimizer._create_global_learning_rate().", "+ (1 - {\\beta}_2) * grad * grad learning\\_rate &", "decay. It only works when we want to specify tensors.", "The default value is 0.001. parameters (list|tuple, optional): List/Tuple of", "self._coeff, \"lr_ratio\": 1. if self._lr_ratio is None else self._lr_ratio(param_and_grad[0]) }", "list of dict. Note that the learning_rate in paramter groups", "a Tensor with shape [1] and data type as float32.", "has been calculated, the result will be reused. # NOTE(wangxi):", "= block.append_op( type=self.type, inputs=inputs, outputs=outputs, attrs=attrs, stop_gradient=True) return adamw_op def", "the License for the specific language governing permissions and #", "found_inf: inputs['SkipUpdate'] = found_inf outputs = { \"ParamOut\": [param_and_grad[0]], \"Moment1Out\":", "Whether to use multi-precision during weight updating. Default is false.", "skip update for not only param, but also momentum and", "t + 1 moment\\_1\\_out & = {\\beta}_1 * moment\\_1 +", "import core from ..fluid import framework from ..fluid.framework import Variable", "update to minimize ``loss``. 
\\ This parameter is required in", "param_and_grad) assert isinstance(block, framework.Block) if isinstance(param_and_grad, dict): param_and_grad = self._update_param_group(param_and_grad)", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "element that has gradient in current mini-batch, so it will", "find_master: master_weight = self._master_weights[param.name] scaled_param = master_weight * decay_coeff paddle.fluid.layers.assign(", "on the AdamW Optimization in paper `DECOUPLED WEIGHT DECAY REGULARIZATION", "learning\\_rate * \\frac{\\sqrt{1 - {\\beta}_2^t}}{1 - {beta}_1^t} param\\_out & =", "dygraph mode, apply_gradient will be executed # every step, so", "Variable): inputs['Beta2Tensor'] = self._beta2 else: attrs['beta2'] = self._beta2 if isinstance(self._epsilon,", "ratio. Otherwise, the learning rate is the original. Default: None.", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "is not None assert epsilon is not None if not", "clear _lr_to_coeff every step, # we do this in _create_optimization_pass", "A small float value for numerical stability. The default value", "weight decay. It only works when we want to specify", "# for we must make sure _create_param_lr() be called after", "sparse parameter optimization.** Examples: .. code-block:: python import paddle linear", "self._auxiliary_vars[key] else: return None def _append_decoupled_weight_decay(self, block, param_and_grad): \"\"\" Add", "optional): Gradient cliping strategy, it's an instance of some derived", "parameter optimization.** Examples: .. code-block:: python import paddle linear =", "\".join([\"Weight Decay, params:\", \",\".join(self._params_name)]) def _update_param_group(self, parameters): self._coeff = parameters.get('coeff',", "= self._beta1 else: attrs['beta1'] = self._beta1 if isinstance(self._beta2, Variable): inputs['Beta2Tensor']", "weight decay op. 
parameter = parameter - parameter * coeff", "- {\\beta}_2) * grad * grad learning\\_rate & = learning\\_rate", "\\ This parameter is required in dygraph mode. And you", "and beta_pow found_inf = self._get_auxiliary_var('found_inf') if found_inf: inputs['SkipUpdate'] = found_inf", "in static mode, at this time all parameters will be", "only param, but also momentum and beta_pow found_inf = self._get_auxiliary_var('found_inf')", "# distributed under the License is distributed on an \"AS", "# Unless required by applicable law or agreed to in", "_update_param_group(self, parameters): self._coeff = parameters.get('coeff', self._default_dict['coeff']) parameters = parameters.get('params') return", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "epsilon: raise ValueError(\"Invaild value of epsilon, expect epsilon >= 0.\")", "self._params_name = set() self._apply_decay_param_fun = apply_decay_param_fun self._coeff = coeff self._lr_to_coeff", "weight_decay if not isinstance(coeff, float) and \\ not isinstance(coeff, framework.Variable):", "_append_optimize_op(), # for we must make sure _create_param_lr() be called", "not isinstance( self._beta1, Variable) else self._beta1.numpy().item(0) _beta2 = self._beta2 if", "_beta2, 'coeff', self._coeff, \"lr_ratio\", lr_ratio_) return None inputs = {", "\\ and not self._apply_decay_param_fun(param.name): return if isinstance(self._learning_rate, float): learning_rate =", "(list|tuple, optional): List/Tuple of ``Tensor`` names to update to minimize", "If the size of parameter is very large, then the", "beta2=0.999, epsilon=1e-8, parameters=None, weight_decay=0.01, lr_ratio=None, apply_decay_param_fun=None, grad_clip=None, lazy_mode=False, multi_precision=False, name=None):", "self._multi_precision and param_and_grad[ 0].dtype == core.VarDesc.VarType.FP16 master_weight = (self._master_weights[param_and_grad[0].name] if", "float32. The default value is 0.999. 
epsilon (float, optional): A", "Note that the learning_rate in paramter groups \\ represents the", "the Apache License, Version 2.0 (the \"License\"); # you may", "name=None): assert learning_rate is not None assert beta1 is not", "beta2, expect beta2 in [0,1).\") if not 0 <= epsilon:", "gradient self._lr_to_coeff = dict() return optimize_ops def __str__(self): return \"", "and \\ not isinstance(coeff, framework.Variable): raise TypeError(\"coeff should be float", "to use multi-precision during weight updating. Default is false. name", "adam.step() adam.clear_grad() #Note that the learning_rate of linear_2 is 0.01.", "is 0.9. beta2 (float|Tensor, optional): The exponential decay rate for", "for the 1st moment estimates. It should be a float", "is 0.001. parameters (list|tuple, optional): List/Tuple of ``Tensor`` names to", "of linear_2 is 0.01. linear_1 = paddle.nn.Linear(10, 10) linear_2 =", "default value is 0.9. beta2 (float|Tensor, optional): The exponential decay", "= linear(inp) loss = paddle.mean(out) beta1 = paddle.to_tensor([0.9], dtype=\"float32\") beta2", "mode. If the size of parameter is very large, then", "= False moment1 = self._get_accumulator(self._moment1_acc_str, param_and_grad[0]) moment2 = self._get_accumulator(self._moment2_acc_str, param_and_grad[0])", "coeff} self.type = \"adamw\" if core.is_compiled_with_xpu(): self.type = \"adam\" #", "or core.is_compiled_with_npu(): raise NotImplementedError( \"'lr_ratio' is unimplemented in XPU and", "Gradient cliping strategy, it's an instance of some derived class", "is updated in both dense mode and sparse mode. 
If", "* coeff * lr Args: block: block in which variable", "if found_inf: inputs['SkipUpdate'] = found_inf outputs = { \"ParamOut\": [param_and_grad[0]],", "beta1 is not None assert beta2 is not None assert", "self._auxiliary_vars: return self._auxiliary_vars[key] else: return None def _append_decoupled_weight_decay(self, block, param_and_grad):", "adamw, to skip update for not only param, but also", "for pipeline to set device:all with paddle.static.device_guard(None): decay_coeff = 1.0", "has gradient in current mini-batch, so it will be much", "lr_ratio=None, apply_decay_param_fun=None, grad_clip=None, lazy_mode=False, multi_precision=False, name=None): assert learning_rate is not", "optimizer. .. math:: t & = t + 1 moment\\_1\\_out", "rate is the original. Default: None. apply_decay_param_fun (function|None, optional): If", "\"with_decay\": with_decay, \"coeff\": self._coeff, \"lr_ratio\": 1. if self._lr_ratio is None", "of some derived class of ``GradientClipBase`` . There are three", "inputs['SkipUpdate'] = found_inf outputs = { \"ParamOut\": [param_and_grad[0]], \"Moment1Out\": [moment1],", "the update may be very slow. The lazy mode only", "paddle _C_ops = core.ops __all__ = [] class AdamW(Adam): r\"\"\"", "need to decay. Raises: Exception: The type of coeff and", "under the License is distributed on an \"AS IS\" BASIS,", "ValueError(\"Invaild value of epsilon, expect epsilon >= 0.\") coeff =", "+ (1 - {\\beta}_1) * grad moemnt\\_2\\_out & = {\\beta}_2", "original Adam algorithm and may lead to different result. The", "\\ not isinstance(coeff, framework.Variable): raise TypeError(\"coeff should be float or", "param.dtype == core.VarDesc.VarType.FP16) if find_master: master_weight = self._master_weights[param.name] scaled_param =", "moemnt\\_2\\_out & = {\\beta}_2 * moment\\_2 + (1 - {\\beta}_2)", "mini-batch, so it will be much more faster. But this", "accumulators are updated at every step. 
Every element of the", "(function|None, optional): If it is not None, only tensors that", "None else self._lr_ratio( param_and_grad[0]) _beta1 = self._beta1 if not isinstance(", "optional): Normally there is no need for user to set", "self._get_accumulator(self._moment2_acc_str, param_and_grad[0]) beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str, param_and_grad[0]) beta2_pow_acc = self._get_accumulator(self._beta2_pow_acc_str, param_and_grad[0])", "assert isinstance(lr_ratio, Callable) if core.is_compiled_with_xpu() or core.is_compiled_with_npu(): raise NotImplementedError( \"'lr_ratio'", "10) linear_2 = paddle.nn.Linear(10, 10) inp = paddle.uniform(shape=[10, 10], min=-0.1,", "param_and_grad = self._update_param_group(param_and_grad) param, grad = param_and_grad # Whether we", "with_decay = True if self._apply_decay_param_fun is not None \\ and", "_append_decoupled_weight_decay(self, block, param_and_grad): \"\"\" Add decoupled weight decay op. parameter", "value is 0.9. beta2 (float|Tensor, optional): The exponential decay rate", "found_inf to adamw, to skip update for not only param,", "grad learning\\_rate & = learning\\_rate * \\frac{\\sqrt{1 - {\\beta}_2^t}}{1 -", "out = linear_2(out) loss = paddle.mean(out) adam = paddle.optimizer.AdamW( learning_rate=0.1,", "The official Adam algorithm has two moving-average accumulators. The accumulators", "10], min=-0.1, max=0.1) out = linear_1(inp) out = linear_2(out) loss", "default value is 0.01. lr_ratio (function|None, optional): If it is", "be a float value or a LRScheduler. The default value", "param, grad = param_and_grad if self._apply_decay_param_fun is not None \\", "moment\\_1 + (1 - {\\beta}_1) * grad moemnt\\_2\\_out & =", "learning_rate = self._learning_rate else: # NOTE. 
We add this function", "from ..fluid import framework from ..fluid.framework import Variable from ..fluid.dygraph", "optimize_ops = super( AdamW, self)._create_optimization_pass(parameters_and_grads) # In dygraph mode, clear", "is 0.01. lr_ratio (function|None, optional): If it is not None,", "But this mode has different semantics with the original Adam", "is not None \\ and not self._apply_decay_param_fun(param.name): with_decay = False", "..fluid import core from ..fluid import framework from ..fluid.framework import", ".optimizer import Optimizer from .adam import Adam from ..fluid import", "inputs['EpsilonTensor'] = self._epsilon else: attrs['epsilon'] = self._epsilon if find_master: inputs[\"MasterParam\"]", "set device:all with paddle.static.device_guard(None): decay_coeff = 1.0 - learning_rate *", "clear _lr_to_coeff after applied gradient self._lr_to_coeff = dict() return optimize_ops", "inputs['Beta2Tensor'] = self._beta2 else: attrs['beta2'] = self._beta2 if isinstance(self._epsilon, Variable):", "self._lr_ratio( param_and_grad[0]) _beta1 = self._beta1 if not isinstance( self._beta1, Variable)", "ANY KIND, either express or implied. # See the License", "parameter * coeff * lr Args: block: block in which", "limitations under the License. from .optimizer import Optimizer from .adam", "decay for the parameter. with_decay = True if self._apply_decay_param_fun is", "the License. # You may obtain a copy of the", "parameters (list|tuple, optional): List/Tuple of ``Tensor`` names to update to", "adam.clear_grad() #Note that the learning_rate of linear_2 is 0.01. 
linear_1", "# See the License for the specific language governing permissions", "every step, so need clear _lr_to_coeff every step, # we", "else: attrs['beta1'] = self._beta1 if isinstance(self._beta2, Variable): inputs['Beta2Tensor'] = self._beta2", "beta1_pow_acc, beta2_pow_acc, 'epsilon', self._epsilon, 'lazy_mode', self._lazy_mode, 'min_row_size_to_use_multithread', 1000, 'beta1', _beta1,", "= _C_ops.adamw( param_and_grad[0], param_and_grad[1], lr, moment1, moment2, beta1_pow_acc, beta2_pow_acc, param_and_grad[0],", "_create_optimization_pass(self, parameters_and_grads): optimize_ops = super( AdamW, self)._create_optimization_pass(parameters_and_grads) # In dygraph", "to be created param_and_grad: (parameters, gradients) pairs, the parameters need", "1. if self._lr_ratio is None else self._lr_ratio( param_and_grad[0]) _beta1 =", "Default: None. grad_clip (GradientClipBase, optional): Gradient cliping strategy, it's an", "apply_decay_param_fun(Tensor.name)==True will be updated with weight decay. It only works", "expect epsilon >= 0.\") coeff = weight_decay if not isinstance(coeff,", "regularization failure in the Adam optimizer. .. 
math:: t &", "def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8, parameters=None, weight_decay=0.01, lr_ratio=None, apply_decay_param_fun=None,", "& = {\\beta}_2 * moment\\_2 + (1 - {\\beta}_2) *", "different semantics with the original Adam algorithm and may lead", "import base as imperative_base from collections import Callable import paddle", "0 <= beta1 < 1: raise ValueError(\"Invaild value of beta1,", "from ..fluid import core from ..fluid import framework from ..fluid.framework", "self)._create_optimization_pass(parameters_and_grads) # In dygraph mode, clear _lr_to_coeff after applied gradient", "if isinstance(self._beta1, Variable): inputs['Beta1Tensor'] = self._beta1 else: attrs['beta1'] = self._beta1", "\\lambda * param) Args: learning_rate (float|LRScheduler, optional): The learning rate", "it can be float or Tensor. The default value is", "= master_weight adamw_op = block.append_op( type=self.type, inputs=inputs, outputs=outputs, attrs=attrs, stop_gradient=True)", "self._coeff self._lr_to_coeff[learning_rate] = decay_coeff find_master = (self._multi_precision and param.dtype ==", "None. grad_clip (GradientClipBase, optional): Gradient cliping strategy, it's an instance", "decay op. parameter = parameter - parameter * coeff *", "..fluid.framework import Variable from ..fluid.dygraph import base as imperative_base from", "consistent. \"\"\" if isinstance(param_and_grad, dict): param_and_grad = self._update_param_group(param_and_grad) param, grad", "self._lr_ratio is None else self._lr_ratio( param_and_grad[0]) _beta1 = self._beta1 if", ", :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there is no gradient", "REGULARIZATION <https://arxiv.org/pdf/1711.05101.pdf>`_. 
it can resolves the problem of L2 regularization", "core.is_compiled_with_xpu(): self.type = \"adam\" # Use _auxiliary_vars together with _set_auxiliary_var/_get_auxiliary_var", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "AdamW(Adam): r\"\"\" The AdamW optimizer is implemented based on the", "float or Tensor.\") self._params_name = set() self._apply_decay_param_fun = apply_decay_param_fun self._coeff", "do weight decay for the parameter. with_decay = True if", ". There are three cliping strategies ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm`", "data type as float32. The default value is 0.999. epsilon", "writing, software # distributed under the License is distributed on", "and parameter is not consistent. \"\"\" if isinstance(param_and_grad, dict): param_and_grad", "stop_gradient=True) return adamw_op def _create_optimization_pass(self, parameters_and_grads): optimize_ops = super( AdamW,", "so it will be much more faster. But this mode", "If it is not None, the learning rate will be", "lazy_mode=lazy_mode, multi_precision=multi_precision) self._default_dict = {'coeff': coeff} self.type = \"adamw\" if", "updated with weight decay. It only works when we want", "NOTE(wangxi): In dygraph mode, apply_gradient will be executed # every", "in self._auxiliary_vars: return self._auxiliary_vars[key] else: return None def _append_decoupled_weight_decay(self, block,", "#Note that the learning_rate of linear_2 is 0.01. linear_1 =", "0.8 }], weight_decay=0.01, beta1=0.9) out.backward() adam.step() adam.clear_grad() \"\"\" def __init__(self,", "0.999. epsilon (float, optional): A small float value for numerical", "isinstance(self._epsilon, Variable): inputs['EpsilonTensor'] = self._epsilon else: attrs['epsilon'] = self._epsilon if", "grad * grad learning\\_rate & = learning\\_rate * \\frac{\\sqrt{1 -", "0.9. beta2 (float|Tensor, optional): The exponential decay rate for the", "numerical stability. 
The default value is 1e-08. weight_decay (float|Tensor, optional):", "= self._multi_precision and param_and_grad[ 0].dtype == core.VarDesc.VarType.FP16 master_weight = (self._master_weights[param_and_grad[0].name]", "moment1, moment2, beta1_pow_acc, beta2_pow_acc, 'epsilon', self._epsilon, 'lazy_mode', self._lazy_mode, 'min_row_size_to_use_multithread', 1000,", "default value is 0.001. parameters (list|tuple, optional): List/Tuple of ``Tensor``", "more faster. But this mode has different semantics with the", "to decay. Raises: Exception: The type of coeff and parameter", "master_weight = (self._master_weights[param_and_grad[0].name] if find_master else None) lr = self._create_param_lr(param_and_grad)", "Args: learning_rate (float|LRScheduler, optional): The learning rate used to update", "paddle.is_compiled_with_xpu(): self._append_decoupled_weight_decay(block, param_and_grad) return super(AdamW, self)._append_optimize_op(block, param_and_grad) assert isinstance(block, framework.Block)", "epsilon is not None if not 0 <= beta1 <", "+ \\lambda * param) Args: learning_rate (float|LRScheduler, optional): The learning", "super(AdamW, self).__init__( learning_rate=learning_rate, parameters=parameters, beta1=beta1, beta2=beta2, epsilon=epsilon, grad_clip=grad_clip, name=name, lazy_mode=lazy_mode,", "make sure _create_param_lr() be called after # optimizer._create_global_learning_rate(). learning_rate =", "large, then the update may be very slow. The lazy", ".adam import Adam from ..fluid import core from ..fluid import", "to minimize ``loss``. \\ This parameter is required in dygraph", "mode only update the element that has gradient in current", "Otherwise, the learning rate is the original. Default: None. apply_decay_param_fun", "self._beta2 if isinstance(self._epsilon, Variable): inputs['EpsilonTensor'] = self._epsilon else: attrs['epsilon'] =", "For more information, please refer to :ref:`api_guide_Name`. The default value", "or Tensor. The default value is 0.01. 
lr_ratio (function|None, optional):", "self._beta2.numpy().item(0) _, _, _, _, _ = _C_ops.adamw( param_and_grad[0], param_and_grad[1],", "'weight_decay': 0.001, 'learning_rate': 0.1, 'beta1': 0.8 }], weight_decay=0.01, beta1=0.9) out.backward()", "\"lr_ratio\": 1. if self._lr_ratio is None else self._lr_ratio(param_and_grad[0]) } if", "{\\beta}_2) * grad * grad learning\\_rate & = learning\\_rate *", "framework.in_dygraph_mode(): lr_ratio_ = 1. if self._lr_ratio is None else self._lr_ratio(", "if self._lr_ratio is None else self._lr_ratio( param_and_grad[0]) _beta1 = self._beta1", "Tensor. The default value is 0.01. lr_ratio (function|None, optional): If", "Use _auxiliary_vars together with _set_auxiliary_var/_get_auxiliary_var to achieve that. self._auxiliary_vars =", "beta1 (float|Tensor, optional): The exponential decay rate for the 1st", "lr_ratio (function|None, optional): If it is not None, the learning", "mode. And you can specify different options for \\ different", "min=-0.1, max=0.1) out = linear_1(inp) out = linear_2(out) loss =", "_auxiliary_vars together with _set_auxiliary_var/_get_auxiliary_var to achieve that. self._auxiliary_vars = dict()", "= self._create_param_lr(param_and_grad) with block.program._optimized_guard( [param, grad]), framework.name_scope('weight decay'): self._params_name.add(param.name) #", "linear(inp) loss = paddle.mean(out) beta1 = paddle.to_tensor([0.9], dtype=\"float32\") beta2 =", "# Whether we should do weight decay for the parameter.", "linear_1(inp) out = linear_2(out) loss = paddle.mean(out) adam = paddle.optimizer.AdamW(", "\"\"\" Add decoupled weight decay op. parameter = parameter -", "inp = paddle.rand([10,10], dtype=\"float32\") out = linear(inp) loss = paddle.mean(out)", "dict() if lr_ratio is not None: assert isinstance(lr_ratio, Callable) if", "if framework.in_dygraph_mode(): lr_ratio_ = 1. 
if self._lr_ratio is None else", "parameter groups such as the learning rate, weight decay, etc,", "[beta1_pow_acc], \"Beta2PowOut\": [beta2_pow_acc], } attrs = { \"lazy_mode\": self._lazy_mode, \"min_row_size_to_use_multithread\":", "attrs['beta2'] = self._beta2 if isinstance(self._epsilon, Variable): inputs['EpsilonTensor'] = self._epsilon else:", "out = linear_1(inp) out = linear_2(out) loss = paddle.mean(out) adam", "beta1, expect beta1 in [0,1).\") if not 0 <= beta2", "or a Tensor with shape [1] and data type as", "The default value is False. multi_precision (bool, optional): Whether to", "moment\\_2 + (1 - {\\beta}_2) * grad * grad learning\\_rate", "is not consistent. \"\"\" if isinstance(param_and_grad, dict): param_and_grad = self._update_param_group(param_and_grad)", "there is no gradient clipping. lazy_mode (bool, optional): The official", "the size of parameter is very large, then the update", "``Tensor`` names to update to minimize ``loss``. \\ This parameter", "WEIGHT DECAY REGULARIZATION <https://arxiv.org/pdf/1711.05101.pdf>`_. it can resolves the problem of", "return None inputs = { \"Param\": [param_and_grad[0]], \"Grad\": [param_and_grad[1]], \"LearningRate\":", "\"Param\": [param_and_grad[0]], \"Grad\": [param_and_grad[1]], \"LearningRate\": [lr], \"Moment1\": [moment1], \"Moment2\": [moment2],", "The AdamW optimizer is implemented based on the AdamW Optimization", "lead to different result. The default value is False. 
multi_precision", "* grad * grad learning\\_rate & = learning\\_rate * \\frac{\\sqrt{1", "be executed # every step, so need clear _lr_to_coeff every", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "= param_and_grad # Whether we should do weight decay for", "Adam from ..fluid import core from ..fluid import framework from", "dict() return optimize_ops def __str__(self): return \" \".join([\"Weight Decay, params:\",", "loss = paddle.mean(out) adam = paddle.optimizer.AdamW( learning_rate=0.1, parameters=[{ 'params': linear_1.parameters()", "= self._update_param_group(param_and_grad) param, grad = param_and_grad # Whether we should", "official Adam algorithm has two moving-average accumulators. The accumulators are", "learning_rate * self._coeff self._lr_to_coeff[learning_rate] = decay_coeff find_master = (self._multi_precision and", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "_beta2 = self._beta2 if not isinstance( self._beta2, Variable) else self._beta2.numpy().item(0)", "learning_rate. \\ The default value is None in static mode,", "decay_coeff is None: # NOTE(wangxi): for pipeline to set device:all", "..fluid import framework from ..fluid.framework import Variable from ..fluid.dygraph import", "None \\ and not self._apply_decay_param_fun(param.name): with_decay = False moment1 =", "block, param_and_grad): if paddle.is_compiled_with_xpu(): self._append_decoupled_weight_decay(block, param_and_grad) return super(AdamW, self)._append_optimize_op(block, param_and_grad)", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "block, param_and_grad): \"\"\" Add decoupled weight decay op. 
parameter =", "_C_ops.adamw( param_and_grad[0], param_and_grad[1], lr, moment1, moment2, beta1_pow_acc, beta2_pow_acc, param_and_grad[0], moment1,", "* decay_coeff paddle.fluid.layers.assign( input=scaled_param, output=master_weight) else: scaled_param = param *", "param, but also momentum and beta_pow found_inf = self._get_auxiliary_var('found_inf') if", "self._update_param_group(param_and_grad) param, grad = param_and_grad # Whether we should do", "1st moment estimates. It should be a float number or", "Callable) if core.is_compiled_with_xpu() or core.is_compiled_with_npu(): raise NotImplementedError( \"'lr_ratio' is unimplemented", "is None else self._lr_ratio(param_and_grad[0]) } if isinstance(self._beta1, Variable): inputs['Beta1Tensor'] =", "epsilon (float, optional): A small float value for numerical stability.", "Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. # #", "outputs=outputs, attrs=attrs, stop_gradient=True) return adamw_op def _create_optimization_pass(self, parameters_and_grads): optimize_ops =", "no gradient clipping. lazy_mode (bool, optional): The official Adam algorithm", "parameters need to decay. Raises: Exception: The type of coeff", "param_and_grad) return super(AdamW, self)._append_optimize_op(block, param_and_grad) assert isinstance(block, framework.Block) if isinstance(param_and_grad,", "param - learning\\_rate * (\\frac{moment\\_1}{\\sqrt{moment\\_2} + \\epsilon} + \\lambda *", "are list of dict. 
Note that the learning_rate in paramter", "dict): param_and_grad = self._update_param_group(param_and_grad) param, grad = param_and_grad if self._apply_decay_param_fun", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "_append_optimize_op(self, block, param_and_grad): if paddle.is_compiled_with_xpu(): self._append_decoupled_weight_decay(block, param_and_grad) return super(AdamW, self)._append_optimize_op(block,", "is not None: assert isinstance(lr_ratio, Callable) if core.is_compiled_with_xpu() or core.is_compiled_with_npu():", "The type of coeff and parameter is not consistent. \"\"\"", "Rights Reserved. # # Licensed under the Apache License, Version", "It only works when we want to specify tensors. Default:", "= param_and_grad if self._apply_decay_param_fun is not None \\ and not", "optimizer._create_global_learning_rate(). learning_rate = self._create_param_lr(param_and_grad) with block.program._optimized_guard( [param, grad]), framework.name_scope('weight decay'):", "paddle.nn.Linear(10, 10) inp = paddle.rand([10,10], dtype=\"float32\") out = linear(inp) loss", "specific language governing permissions and # limitations under the License.", "False moment1 = self._get_accumulator(self._moment1_acc_str, param_and_grad[0]) moment2 = self._get_accumulator(self._moment2_acc_str, param_and_grad[0]) beta1_pow_acc", "beta2 in [0,1).\") if not 0 <= epsilon: raise ValueError(\"Invaild", "moment estimates. It should be a float number or a", "None, meaning there is no gradient clipping. lazy_mode (bool, optional):", "else: attrs['beta2'] = self._beta2 if isinstance(self._epsilon, Variable): inputs['EpsilonTensor'] = self._epsilon", "(float, optional): A small float value for numerical stability. 
The", "import paddle linear = paddle.nn.Linear(10, 10) inp = paddle.rand([10,10], dtype=\"float32\")", "self._beta2 else: attrs['beta2'] = self._beta2 if isinstance(self._epsilon, Variable): inputs['EpsilonTensor'] =", "# you may not use this file except in compliance", "minimize ``loss``. \\ This parameter is required in dygraph mode.", "def _get_auxiliary_var(self, key): if key in self._auxiliary_vars: return self._auxiliary_vars[key] else:", "learning_rate=0.1, parameters=[{ 'params': linear_1.parameters() }, { 'params': linear_2.parameters(), 'weight_decay': 0.001,", "expect beta2 in [0,1).\") if not 0 <= epsilon: raise", "Callable import paddle _C_ops = core.ops __all__ = [] class", "a float number or a Tensor with shape [1] and", "= self._beta1 if not isinstance( self._beta1, Variable) else self._beta1.numpy().item(0) _beta2", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "(function|None, optional): If it is not None, the learning rate", "0 <= beta2 < 1: raise ValueError(\"Invaild value of beta2,", "please refer to :ref:`api_guide_Name`. The default value is None. **Notes**:", "do this in _create_optimization_pass decay_coeff = self._lr_to_coeff.get(learning_rate, None) if decay_coeff", "not None: assert isinstance(lr_ratio, Callable) if core.is_compiled_with_xpu() or core.is_compiled_with_npu(): raise", "under the Apache License, Version 2.0 (the \"License\"); # you", "\"\"\" if isinstance(param_and_grad, dict): param_and_grad = self._update_param_group(param_and_grad) param, grad =", "permissions and # limitations under the License. from .optimizer import", "lr_ratio_) return None inputs = { \"Param\": [param_and_grad[0]], \"Grad\": [param_and_grad[1]],", "the learning rate, weight decay, etc, \\ then the parameters", "will be reused. # NOTE(wangxi): In dygraph mode, apply_gradient will", "groups \\ represents the scale of base learning_rate. 
\\ The", "dtype=\"float32\") out = linear(inp) loss = paddle.mean(out) beta1 = paddle.to_tensor([0.9],", "if paddle.is_compiled_with_xpu(): self._append_decoupled_weight_decay(block, param_and_grad) return super(AdamW, self)._append_optimize_op(block, param_and_grad) assert isinstance(block,", "= weight_decay if not isinstance(coeff, float) and \\ not isinstance(coeff,", "* (\\frac{moment\\_1}{\\sqrt{moment\\_2} + \\epsilon} + \\lambda * param) Args: learning_rate", "base learning_rate. \\ The default value is None in static", "moving-average accumulators. The accumulators are updated at every step. Every", "dygraph mode, clear _lr_to_coeff after applied gradient self._lr_to_coeff = dict()", "beta1=beta1, beta2=beta2, epsilon=epsilon, grad_clip=grad_clip, name=name, lazy_mode=lazy_mode, multi_precision=multi_precision) self._default_dict = {'coeff':", "required in dygraph mode. And you can specify different options", "and not self._apply_decay_param_fun(param.name): with_decay = False moment1 = self._get_accumulator(self._moment1_acc_str, param_and_grad[0])", "None assert beta2 is not None assert epsilon is not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "return super(AdamW, self)._append_optimize_op(block, param_and_grad) assert isinstance(block, framework.Block) if isinstance(param_and_grad, dict):", "is required in dygraph mode. And you can specify different", "out.backward() adam.step() adam.clear_grad() #Note that the learning_rate of linear_2 is", "of base learning_rate. \\ The default value is None in", "class of ``GradientClipBase`` . There are three cliping strategies (", "decay rate for the 2nd moment estimates. It should be", "{'coeff': coeff} self.type = \"adamw\" if core.is_compiled_with_xpu(): self.type = \"adam\"", "update the element that has gradient in current mini-batch, so", "1.0 - learning_rate * self._coeff self._lr_to_coeff[learning_rate] = decay_coeff find_master =", "accumulators. 
The accumulators are updated at every step. Every element", "None def _append_decoupled_weight_decay(self, block, param_and_grad): \"\"\" Add decoupled weight decay", "not None assert epsilon is not None if not 0", "lr_ratio is not None: assert isinstance(lr_ratio, Callable) if core.is_compiled_with_xpu() or", "= self._beta1 if isinstance(self._beta2, Variable): inputs['Beta2Tensor'] = self._beta2 else: attrs['beta2']", "as float32. The default value is 0.999. epsilon (float, optional):", "in [0,1).\") if not 0 <= beta2 < 1: raise", "param_and_grad[0]) beta2_pow_acc = self._get_accumulator(self._beta2_pow_acc_str, param_and_grad[0]) find_master = self._multi_precision and param_and_grad[", "The learning rate used to update ``Parameter``. It can be", "clipping. lazy_mode (bool, optional): The official Adam algorithm has two", "in current mini-batch, so it will be much more faster.", "with_decay = False moment1 = self._get_accumulator(self._moment1_acc_str, param_and_grad[0]) moment2 = self._get_accumulator(self._moment2_acc_str,", "support sparse parameter optimization.** Examples: .. code-block:: python import paddle", "Default is false. name (str, optional): Normally there is no", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "Authors. All Rights Reserved. # # Licensed under the Apache", "if self._lr_ratio is None else self._lr_ratio(param_and_grad[0]) } if isinstance(self._beta1, Variable):", "else: return None def _append_decoupled_weight_decay(self, block, param_and_grad): \"\"\" Add decoupled", "be a float number or a Tensor with shape [1]", "paddle.fluid.layers.assign( input=scaled_param, output=master_weight) else: scaled_param = param * decay_coeff paddle.fluid.layers.assign(input=scaled_param,", "parameter is not consistent. \"\"\" if isinstance(param_and_grad, dict): param_and_grad =", "outputs[\"MasterParamOut\"] = master_weight adamw_op = block.append_op( type=self.type, inputs=inputs, outputs=outputs, attrs=attrs,", "op. 
parameter = parameter - parameter * coeff * lr", "etc, \\ then the parameters are list of dict. Note", "Apache License, Version 2.0 (the \"License\"); # you may not", "either express or implied. # See the License for the", "# limitations under the License. from .optimizer import Optimizer from", "code-block:: python import paddle linear = paddle.nn.Linear(10, 10) inp =", "update may be very slow. The lazy mode only update", "XPU and NPU\") self._lr_ratio = lr_ratio super(AdamW, self).__init__( learning_rate=learning_rate, parameters=parameters,", "epsilon=1e-8, parameters=None, weight_decay=0.01, lr_ratio=None, apply_decay_param_fun=None, grad_clip=None, lazy_mode=False, multi_precision=False, name=None): assert", "name=name, lazy_mode=lazy_mode, multi_precision=multi_precision) self._default_dict = {'coeff': coeff} self.type = \"adamw\"", "..fluid.dygraph import base as imperative_base from collections import Callable import", "not only param, but also momentum and beta_pow found_inf =", "beta2=beta2, epsilon=epsilon, grad_clip=grad_clip, name=name, lazy_mode=lazy_mode, multi_precision=multi_precision) self._default_dict = {'coeff': coeff}", "slow. The lazy mode only update the element that has", "= super( AdamW, self)._create_optimization_pass(parameters_and_grads) # In dygraph mode, clear _lr_to_coeff", "float): learning_rate = self._learning_rate else: # NOTE. We add this", "[0,1).\") if not 0 <= epsilon: raise ValueError(\"Invaild value of", "at every step. Every element of the two moving-average is", "\"'lr_ratio' is unimplemented in XPU and NPU\") self._lr_ratio = lr_ratio", "in paper `DECOUPLED WEIGHT DECAY REGULARIZATION <https://arxiv.org/pdf/1711.05101.pdf>`_. it can resolves", "_create_optimization_pass decay_coeff = self._lr_to_coeff.get(learning_rate, None) if decay_coeff is None: #", "None if not 0 <= beta1 < 1: raise ValueError(\"Invaild", "NOTE. 
We add this function to the _append_optimize_op(), # for", "import Optimizer from .adam import Adam from ..fluid import core", "self._lr_ratio(param_and_grad[0]) } if isinstance(self._beta1, Variable): inputs['Beta1Tensor'] = self._beta1 else: attrs['beta1']", "- {beta}_1^t} param\\_out & = param - learning\\_rate * (\\frac{moment\\_1}{\\sqrt{moment\\_2}", "faster. But this mode has different semantics with the original", "base as imperative_base from collections import Callable import paddle _C_ops", "in the Adam optimizer. .. math:: t & = t", "paddle.nn.Linear(10, 10) linear_2 = paddle.nn.Linear(10, 10) inp = paddle.uniform(shape=[10, 10],", "None assert epsilon is not None if not 0 <=", ":ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` , :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there", "so need clear _lr_to_coeff every step, # we do this", "value is False. multi_precision (bool, optional): Whether to use multi-precision", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "the _append_optimize_op(), # for we must make sure _create_param_lr() be", "the AdamW Optimization in paper `DECOUPLED WEIGHT DECAY REGULARIZATION <https://arxiv.org/pdf/1711.05101.pdf>`_.", "cliping strategy, it's an instance of some derived class of", "{ \"Param\": [param_and_grad[0]], \"Grad\": [param_and_grad[1]], \"LearningRate\": [lr], \"Moment1\": [moment1], \"Moment2\":", "+ 1 moment\\_1\\_out & = {\\beta}_1 * moment\\_1 + (1", "float number or a Tensor with shape [1] and data", "weight decay, etc, \\ then the parameters are list of", "output=param) def _append_optimize_op(self, block, param_and_grad): if paddle.is_compiled_with_xpu(): self._append_decoupled_weight_decay(block, param_and_grad) return", "paper `DECOUPLED WEIGHT DECAY REGULARIZATION <https://arxiv.org/pdf/1711.05101.pdf>`_. 
it can resolves the", "set() self._apply_decay_param_fun = apply_decay_param_fun self._coeff = coeff self._lr_to_coeff = dict()", "# Pass found_inf to adamw, to skip update for not", "self._lazy_mode, \"min_row_size_to_use_multithread\": 1000, \"multi_precision\": find_master, \"with_decay\": with_decay, \"coeff\": self._coeff, \"lr_ratio\":", "and NPU\") self._lr_ratio = lr_ratio super(AdamW, self).__init__( learning_rate=learning_rate, parameters=parameters, beta1=beta1,", "weight_decay (float|Tensor, optional): The weight decay coefficient, it can be", "= lr_ratio super(AdamW, self).__init__( learning_rate=learning_rate, parameters=parameters, beta1=beta1, beta2=beta2, epsilon=epsilon, grad_clip=grad_clip,", "self._beta1.numpy().item(0) _beta2 = self._beta2 if not isinstance( self._beta2, Variable) else", "core.VarDesc.VarType.FP16 master_weight = (self._master_weights[param_and_grad[0].name] if find_master else None) lr =", "= paddle.nn.Linear(10, 10) inp = paddle.uniform(shape=[10, 10], min=-0.1, max=0.1) out", "decay_coeff paddle.fluid.layers.assign(input=scaled_param, output=param) def _append_optimize_op(self, block, param_and_grad): if paddle.is_compiled_with_xpu(): self._append_decoupled_weight_decay(block,", "learning_rate (float|LRScheduler, optional): The learning rate used to update ``Parameter``.", "user to set this property. For more information, please refer", "(float|Tensor, optional): The exponential decay rate for the 1st moment", "together with _set_auxiliary_var/_get_auxiliary_var to achieve that. self._auxiliary_vars = dict() def", "= paddle.to_tensor([0.99], dtype=\"float32\") adam = paddle.optimizer.AdamW(learning_rate=0.1, parameters=linear.parameters(), beta1=beta1, beta2=beta2, weight_decay=0.01)", "self._lr_to_coeff[learning_rate] = decay_coeff find_master = (self._multi_precision and param.dtype == core.VarDesc.VarType.FP16)", "some derived class of ``GradientClipBase`` . 
There are three cliping", "more information, please refer to :ref:`api_guide_Name`. The default value is", "linear_1 = paddle.nn.Linear(10, 10) linear_2 = paddle.nn.Linear(10, 10) inp =", "{\\beta}_1 * moment\\_1 + (1 - {\\beta}_1) * grad moemnt\\_2\\_out", "moment2, beta1_pow_acc, beta2_pow_acc, param_and_grad[0], moment1, moment2, beta1_pow_acc, beta2_pow_acc, 'epsilon', self._epsilon,", "= master_weight outputs[\"MasterParamOut\"] = master_weight adamw_op = block.append_op( type=self.type, inputs=inputs,", "is None. **Notes**: **Currently, AdamW doesn't support sparse parameter optimization.**", "coeff = weight_decay if not isinstance(coeff, float) and \\ not", "{\\beta}_2 * moment\\_2 + (1 - {\\beta}_2) * grad *", "= { \"ParamOut\": [param_and_grad[0]], \"Moment1Out\": [moment1], \"Moment2Out\": [moment2], \"Beta1PowOut\": [beta1_pow_acc],", "**Notes**: **Currently, AdamW doesn't support sparse parameter optimization.** Examples: ..", "mode, apply_gradient will be executed # every step, so need", "self._default_dict = {'coeff': coeff} self.type = \"adamw\" if core.is_compiled_with_xpu(): self.type", "None assert beta1 is not None assert beta2 is not", "It can be a float value or a LRScheduler. The", "is unimplemented in XPU and NPU\") self._lr_ratio = lr_ratio super(AdamW,", "use this file except in compliance with the License. #", "{beta}_1^t} param\\_out & = param - learning\\_rate * (\\frac{moment\\_1}{\\sqrt{moment\\_2} +", "= \"adam\" # Use _auxiliary_vars together with _set_auxiliary_var/_get_auxiliary_var to achieve", "10) inp = paddle.uniform(shape=[10, 10], min=-0.1, max=0.1) out = linear_1(inp)", "updated in both dense mode and sparse mode. 
If the", "self._beta2, Variable) else self._beta2.numpy().item(0) _, _, _, _, _ =", "= self._get_accumulator(self._moment2_acc_str, param_and_grad[0]) beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str, param_and_grad[0]) beta2_pow_acc = self._get_accumulator(self._beta2_pow_acc_str,", "all parameters will be updated. beta1 (float|Tensor, optional): The exponential", "decoupled weight decay op. parameter = parameter - parameter *", "AdamW, self)._create_optimization_pass(parameters_and_grads) # In dygraph mode, clear _lr_to_coeff after applied", "None. **Notes**: **Currently, AdamW doesn't support sparse parameter optimization.** Examples:", "linear_2 is 0.01. linear_1 = paddle.nn.Linear(10, 10) linear_2 = paddle.nn.Linear(10,", ".. math:: t & = t + 1 moment\\_1\\_out &", "else None) lr = self._create_param_lr(param_and_grad) # create the adamw optimize", "L2 regularization failure in the Adam optimizer. .. math:: t", "def _append_optimize_op(self, block, param_and_grad): if paddle.is_compiled_with_xpu(): self._append_decoupled_weight_decay(block, param_and_grad) return super(AdamW,", "derived class of ``GradientClipBase`` . There are three cliping strategies", "not self._apply_decay_param_fun(param.name): with_decay = False moment1 = self._get_accumulator(self._moment1_acc_str, param_and_grad[0]) moment2", "param\\_out & = param - learning\\_rate * (\\frac{moment\\_1}{\\sqrt{moment\\_2} + \\epsilon}", "= self._beta2 if not isinstance( self._beta2, Variable) else self._beta2.numpy().item(0) _,", "both dense mode and sparse mode. 
If the size of", "None: # NOTE(wangxi): for pipeline to set device:all with paddle.static.device_guard(None):", "if not 0 <= beta1 < 1: raise ValueError(\"Invaild value", "grad moemnt\\_2\\_out & = {\\beta}_2 * moment\\_2 + (1 -", "scaled_param = master_weight * decay_coeff paddle.fluid.layers.assign( input=scaled_param, output=master_weight) else: scaled_param", "# every step, so need clear _lr_to_coeff every step, #", "(parameters, gradients) pairs, the parameters need to decay. Raises: Exception:", "\\frac{\\sqrt{1 - {\\beta}_2^t}}{1 - {beta}_1^t} param\\_out & = param -", "\"adamw\" if core.is_compiled_with_xpu(): self.type = \"adam\" # Use _auxiliary_vars together", "in compliance with the License. # You may obtain a", "= learning\\_rate * \\frac{\\sqrt{1 - {\\beta}_2^t}}{1 - {beta}_1^t} param\\_out &", "software # distributed under the License is distributed on an", "import paddle _C_ops = core.ops __all__ = [] class AdamW(Adam):", "rate ratio. Otherwise, the learning rate is the original. Default:", "only tensors that makes apply_decay_param_fun(Tensor.name)==True will be updated with weight", "three cliping strategies ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` , :ref:`api_fluid_clip_GradientClipByValue` ).", "learning_rate = self._create_param_lr(param_and_grad) with block.program._optimized_guard( [param, grad]), framework.name_scope('weight decay'): self._params_name.add(param.name)", "= paddle.optimizer.AdamW(learning_rate=0.1, parameters=linear.parameters(), beta1=beta1, beta2=beta2, weight_decay=0.01) out.backward() adam.step() adam.clear_grad() #Note", "- {\\beta}_1) * grad moemnt\\_2\\_out & = {\\beta}_2 * moment\\_2", "[param_and_grad[0]], \"Grad\": [param_and_grad[1]], \"LearningRate\": [lr], \"Moment1\": [moment1], \"Moment2\": [moment2], \"Beta1Pow\":", "which variable is to be created param_and_grad: (parameters, gradients) pairs,", "every step. 
Every element of the two moving-average is updated", "is 0.999. epsilon (float, optional): A small float value for", "max=0.1) out = linear_1(inp) out = linear_2(out) loss = paddle.mean(out)", "= decay_coeff find_master = (self._multi_precision and param.dtype == core.VarDesc.VarType.FP16) if", "update for not only param, but also momentum and beta_pow", "None: assert isinstance(lr_ratio, Callable) if core.is_compiled_with_xpu() or core.is_compiled_with_npu(): raise NotImplementedError(", "failure in the Adam optimizer. .. math:: t & =", "\\ and not self._apply_decay_param_fun(param.name): with_decay = False moment1 = self._get_accumulator(self._moment1_acc_str,", "- {\\beta}_2^t}}{1 - {beta}_1^t} param\\_out & = param - learning\\_rate", "paddle.mean(out) adam = paddle.optimizer.AdamW( learning_rate=0.1, parameters=[{ 'params': linear_1.parameters() }, {", "[beta2_pow_acc], } attrs = { \"lazy_mode\": self._lazy_mode, \"min_row_size_to_use_multithread\": 1000, \"multi_precision\":", "add this function to the _append_optimize_op(), # for we must", "that makes apply_decay_param_fun(Tensor.name)==True will be updated with weight decay. It", "find_master, \"with_decay\": with_decay, \"coeff\": self._coeff, \"lr_ratio\": 1. if self._lr_ratio is", ", :ref:`api_fluid_clip_GradientClipByNorm` , :ref:`api_fluid_clip_GradientClipByValue` ). Default None, meaning there is", "if not isinstance( self._beta2, Variable) else self._beta2.numpy().item(0) _, _, _,", "calculated, the result will be reused. # NOTE(wangxi): In dygraph", "such as the learning rate, weight decay, etc, \\ then", "= paddle.nn.Linear(10, 10) inp = paddle.rand([10,10], dtype=\"float32\") out = linear(inp)", "The accumulators are updated at every step. Every element of", "that has gradient in current mini-batch, so it will be", "with the License. # You may obtain a copy of", "a LRScheduler. The default value is 0.001. 
parameters (list|tuple, optional):", "self._get_accumulator(self._moment1_acc_str, param_and_grad[0]) moment2 = self._get_accumulator(self._moment2_acc_str, param_and_grad[0]) beta1_pow_acc = self._get_accumulator(self._beta1_pow_acc_str, param_and_grad[0])", "step, # we do this in _create_optimization_pass decay_coeff = self._lr_to_coeff.get(learning_rate,", "we do this in _create_optimization_pass decay_coeff = self._lr_to_coeff.get(learning_rate, None) if", "exponential decay rate for the 1st moment estimates. It should", "for the 2nd moment estimates. It should be a float", "the adamw optimize op if framework.in_dygraph_mode(): lr_ratio_ = 1. if", "The default value is 0.9. beta2 (float|Tensor, optional): The exponential", "Default None, meaning there is no gradient clipping. lazy_mode (bool,", "'epsilon', self._epsilon, 'lazy_mode', self._lazy_mode, 'min_row_size_to_use_multithread', 1000, 'beta1', _beta1, 'beta2', _beta2,", "this property. For more information, please refer to :ref:`api_guide_Name`. The", "decay_coeff = self._lr_to_coeff.get(learning_rate, None) if decay_coeff is None: # NOTE(wangxi):", "key, val): self._auxiliary_vars[key] = val def _get_auxiliary_var(self, key): if key", "param_and_grad[1], lr, moment1, moment2, beta1_pow_acc, beta2_pow_acc, param_and_grad[0], moment1, moment2, beta1_pow_acc,", "can be float or Tensor. The default value is 0.01.", "express or implied. # See the License for the specific", "parameter = parameter - parameter * coeff * lr Args:", "except in compliance with the License. # You may obtain", "The exponential decay rate for the 1st moment estimates. It", "(str, optional): Normally there is no need for user to", "framework.Block) if isinstance(param_and_grad, dict): param_and_grad = self._update_param_group(param_and_grad) param, grad =", "float) and \\ not isinstance(coeff, framework.Variable): raise TypeError(\"coeff should be", "\\ then the parameters are list of dict. 
Note that", "if lr_ratio is not None: assert isinstance(lr_ratio, Callable) if core.is_compiled_with_xpu()", "if find_master: inputs[\"MasterParam\"] = master_weight outputs[\"MasterParamOut\"] = master_weight adamw_op =", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "moment1 = self._get_accumulator(self._moment1_acc_str, param_and_grad[0]) moment2 = self._get_accumulator(self._moment2_acc_str, param_and_grad[0]) beta1_pow_acc =", "inputs=inputs, outputs=outputs, attrs=attrs, stop_gradient=True) return adamw_op def _create_optimization_pass(self, parameters_and_grads): optimize_ops", "assert beta2 is not None assert epsilon is not None", "not isinstance(coeff, float) and \\ not isinstance(coeff, framework.Variable): raise TypeError(\"coeff", "1 moment\\_1\\_out & = {\\beta}_1 * moment\\_1 + (1 -", "# In dygraph mode, clear _lr_to_coeff after applied gradient self._lr_to_coeff", "should be a float number or a Tensor with shape", "\" \".join([\"Weight Decay, params:\", \",\".join(self._params_name)]) def _update_param_group(self, parameters): self._coeff =", "CONDITIONS OF ANY KIND, either express or implied. # See", "1: raise ValueError(\"Invaild value of beta1, expect beta1 in [0,1).\")", "in XPU and NPU\") self._lr_ratio = lr_ratio super(AdamW, self).__init__( learning_rate=learning_rate,", "Variable): inputs['EpsilonTensor'] = self._epsilon else: attrs['epsilon'] = self._epsilon if find_master:", "The default value is 1e-08. weight_decay (float|Tensor, optional): The weight", "The default value is 0.999. 
epsilon (float, optional): A small", "class AdamW(Adam): r\"\"\" The AdamW optimizer is implemented based on", "assert isinstance(block, framework.Block) if isinstance(param_and_grad, dict): param_and_grad = self._update_param_group(param_and_grad) param,", "out = linear(inp) loss = paddle.mean(out) beta1 = paddle.to_tensor([0.9], dtype=\"float32\")", "self._append_decoupled_weight_decay(block, param_and_grad) return super(AdamW, self)._append_optimize_op(block, param_and_grad) assert isinstance(block, framework.Block) if", "resolves the problem of L2 regularization failure in the Adam", "= parameter - parameter * coeff * lr Args: block:", "or a LRScheduler. The default value is 0.001. parameters (list|tuple,", "in paramter groups \\ represents the scale of base learning_rate.", "= { \"lazy_mode\": self._lazy_mode, \"min_row_size_to_use_multithread\": 1000, \"multi_precision\": find_master, \"with_decay\": with_decay,", "= paddle.to_tensor([0.9], dtype=\"float32\") beta2 = paddle.to_tensor([0.99], dtype=\"float32\") adam = paddle.optimizer.AdamW(learning_rate=0.1,", "return None def _append_decoupled_weight_decay(self, block, param_and_grad): \"\"\" Add decoupled weight", "not None assert beta2 is not None assert epsilon is", "params:\", \",\".join(self._params_name)]) def _update_param_group(self, parameters): self._coeff = parameters.get('coeff', self._default_dict['coeff']) parameters", "== core.VarDesc.VarType.FP16) if find_master: master_weight = self._master_weights[param.name] scaled_param = master_weight", "def _create_optimization_pass(self, parameters_and_grads): optimize_ops = super( AdamW, self)._create_optimization_pass(parameters_and_grads) # In", "import Callable import paddle _C_ops = core.ops __all__ = []", "float or Tensor. The default value is 0.01. lr_ratio (function|None,", "when we want to specify tensors. Default: None. 
grad_clip (GradientClipBase,", "with the original Adam algorithm and may lead to different", "meaning there is no gradient clipping. lazy_mode (bool, optional): The", "paramter groups \\ represents the scale of base learning_rate. \\", "'beta2', _beta2, 'coeff', self._coeff, \"lr_ratio\", lr_ratio_) return None inputs =", "it has been calculated, the result will be reused. #", "0.001, 'learning_rate': 0.1, 'beta1': 0.8 }], weight_decay=0.01, beta1=0.9) out.backward() adam.step()", "set this property. For more information, please refer to :ref:`api_guide_Name`.", "0.001. parameters (list|tuple, optional): List/Tuple of ``Tensor`` names to update", "= self._get_accumulator(self._beta1_pow_acc_str, param_and_grad[0]) beta2_pow_acc = self._get_accumulator(self._beta2_pow_acc_str, param_and_grad[0]) find_master = self._multi_precision", "_, _, _ = _C_ops.adamw( param_and_grad[0], param_and_grad[1], lr, moment1, moment2,", "for user to set this property. For more information, please", "import framework from ..fluid.framework import Variable from ..fluid.dygraph import base", "if isinstance(self._beta2, Variable): inputs['Beta2Tensor'] = self._beta2 else: attrs['beta2'] = self._beta2", "self)._append_optimize_op(block, param_and_grad) assert isinstance(block, framework.Block) if isinstance(param_and_grad, dict): param_and_grad =", "strategies ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` , :ref:`api_fluid_clip_GradientClipByValue` ). Default None,", "lr_ratio_ = 1. if self._lr_ratio is None else self._lr_ratio( param_and_grad[0])", "find_master: inputs[\"MasterParam\"] = master_weight outputs[\"MasterParamOut\"] = master_weight adamw_op = block.append_op(", "value is 0.01. lr_ratio (function|None, optional): If it is not", "core.is_compiled_with_xpu() or core.is_compiled_with_npu(): raise NotImplementedError( \"'lr_ratio' is unimplemented in XPU", "sparse mode. 
If the size of parameter is very large,", "of ``GradientClipBase`` . There are three cliping strategies ( :ref:`api_fluid_clip_GradientClipByGlobalNorm`", "with weight decay. It only works when we want to", "optional): Whether to use multi-precision during weight updating. Default is", "need clear _lr_to_coeff every step, # we do this in", "not consistent. \"\"\" if isinstance(param_and_grad, dict): param_and_grad = self._update_param_group(param_and_grad) param,", "variable is to be created param_and_grad: (parameters, gradients) pairs, the", "or Tensor.\") self._params_name = set() self._apply_decay_param_fun = apply_decay_param_fun self._coeff =", "it is not None, only tensors that makes apply_decay_param_fun(Tensor.name)==True will", "\\epsilon} + \\lambda * param) Args: learning_rate (float|LRScheduler, optional): The", "framework.name_scope('weight decay'): self._params_name.add(param.name) # If it has been calculated, the", "and param.dtype == core.VarDesc.VarType.FP16) if find_master: master_weight = self._master_weights[param.name] scaled_param", "scale of base learning_rate. \\ The default value is None", "learning rate used to update ``Parameter``. It can be a", "but also momentum and beta_pow found_inf = self._get_auxiliary_var('found_inf') if found_inf:", "as float32. The default value is 0.9. 
beta2 (float|Tensor, optional):", "grad_clip=grad_clip, name=name, lazy_mode=lazy_mode, multi_precision=multi_precision) self._default_dict = {'coeff': coeff} self.type =", "lr = self._create_param_lr(param_and_grad) # create the adamw optimize op if", "param_and_grad[0], param_and_grad[1], lr, moment1, moment2, beta1_pow_acc, beta2_pow_acc, param_and_grad[0], moment1, moment2,", "decay_coeff find_master = (self._multi_precision and param.dtype == core.VarDesc.VarType.FP16) if find_master:", "groups such as the learning rate, weight decay, etc, \\", "are three cliping strategies ( :ref:`api_fluid_clip_GradientClipByGlobalNorm` , :ref:`api_fluid_clip_GradientClipByNorm` , :ref:`api_fluid_clip_GradientClipByValue`", "is None else self._lr_ratio( param_and_grad[0]) _beta1 = self._beta1 if not", "inputs = { \"Param\": [param_and_grad[0]], \"Grad\": [param_and_grad[1]], \"LearningRate\": [lr], \"Moment1\":", "original. Default: None. apply_decay_param_fun (function|None, optional): If it is not", "type of coeff and parameter is not consistent. \"\"\" if", "_ = _C_ops.adamw( param_and_grad[0], param_and_grad[1], lr, moment1, moment2, beta1_pow_acc, beta2_pow_acc,", "paddle.uniform(shape=[10, 10], min=-0.1, max=0.1) out = linear_1(inp) out = linear_2(out)", "import Variable from ..fluid.dygraph import base as imperative_base from collections", "update ``Parameter``. It can be a float value or a", "apply_decay_param_fun self._coeff = coeff self._lr_to_coeff = dict() if lr_ratio is", "_, _ = _C_ops.adamw( param_and_grad[0], param_and_grad[1], lr, moment1, moment2, beta1_pow_acc,", "param, grad = param_and_grad # Whether we should do weight", "= set() self._apply_decay_param_fun = apply_decay_param_fun self._coeff = coeff self._lr_to_coeff =", "_set_auxiliary_var/_get_auxiliary_var to achieve that. self._auxiliary_vars = dict() def _set_auxiliary_var(self, key,", "for numerical stability. The default value is 1e-08. weight_decay (float|Tensor,", "is 1e-08. 
weight_decay (float|Tensor, optional): The weight decay coefficient, it", "the learning rate will be updated with layerwise learning rate", "op if framework.in_dygraph_mode(): lr_ratio_ = 1. if self._lr_ratio is None", "if find_master: master_weight = self._master_weights[param.name] scaled_param = master_weight * decay_coeff", "of the two moving-average is updated in both dense mode", "\"min_row_size_to_use_multithread\": 1000, \"multi_precision\": find_master, \"with_decay\": with_decay, \"coeff\": self._coeff, \"lr_ratio\": 1.", "layerwise learning rate ratio. Otherwise, the learning rate is the", "learning rate ratio. Otherwise, the learning rate is the original.", "is the original. Default: None. apply_decay_param_fun (function|None, optional): If it", "core.VarDesc.VarType.FP16) if find_master: master_weight = self._master_weights[param.name] scaled_param = master_weight *", "key in self._auxiliary_vars: return self._auxiliary_vars[key] else: return None def _append_decoupled_weight_decay(self,", "weight_decay=0.01) out.backward() adam.step() adam.clear_grad() #Note that the learning_rate of linear_2", "\"adam\" # Use _auxiliary_vars together with _set_auxiliary_var/_get_auxiliary_var to achieve that.", "= val def _get_auxiliary_var(self, key): if key in self._auxiliary_vars: return", "\"multi_precision\": find_master, \"with_decay\": with_decay, \"coeff\": self._coeff, \"lr_ratio\": 1. if self._lr_ratio", "is implemented based on the AdamW Optimization in paper `DECOUPLED", "of coeff and parameter is not consistent. \"\"\" if isinstance(param_and_grad,", "not 0 <= epsilon: raise ValueError(\"Invaild value of epsilon, expect", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "False. 
multi_precision (bool, optional): Whether to use multi-precision during weight", "= \"adamw\" if core.is_compiled_with_xpu(): self.type = \"adam\" # Use _auxiliary_vars", "(float|LRScheduler, optional): The learning rate used to update ``Parameter``. It", "to :ref:`api_guide_Name`. The default value is None. **Notes**: **Currently, AdamW", "math:: t & = t + 1 moment\\_1\\_out & =", "framework.Variable): raise TypeError(\"coeff should be float or Tensor.\") self._params_name =", "\"lr_ratio\", lr_ratio_) return None inputs = { \"Param\": [param_and_grad[0]], \"Grad\":", "< 1: raise ValueError(\"Invaild value of beta2, expect beta2 in", "linear = paddle.nn.Linear(10, 10) inp = paddle.rand([10,10], dtype=\"float32\") out =", "with _set_auxiliary_var/_get_auxiliary_var to achieve that. self._auxiliary_vars = dict() def _set_auxiliary_var(self,", "weight_decay=0.01, lr_ratio=None, apply_decay_param_fun=None, grad_clip=None, lazy_mode=False, multi_precision=False, name=None): assert learning_rate is", "parameters=[{ 'params': linear_1.parameters() }, { 'params': linear_2.parameters(), 'weight_decay': 0.001, 'learning_rate':", "rate will be updated with layerwise learning rate ratio. Otherwise,", "ValueError(\"Invaild value of beta2, expect beta2 in [0,1).\") if not", "= (self._master_weights[param_and_grad[0].name] if find_master else None) lr = self._create_param_lr(param_and_grad) #", "None) if decay_coeff is None: # NOTE(wangxi): for pipeline to", "epsilon >= 0.\") coeff = weight_decay if not isinstance(coeff, float)", "[lr], \"Moment1\": [moment1], \"Moment2\": [moment2], \"Beta1Pow\": [beta1_pow_acc], \"Beta2Pow\": [beta2_pow_acc], }", "decay, etc, \\ then the parameters are list of dict.", "\"Moment2Out\": [moment2], \"Beta1PowOut\": [beta1_pow_acc], \"Beta2PowOut\": [beta2_pow_acc], } attrs = {", "device:all with paddle.static.device_guard(None): decay_coeff = 1.0 - learning_rate * self._coeff", "the result will be reused. 
# NOTE(wangxi): In dygraph mode,", "param_and_grad = self._update_param_group(param_and_grad) param, grad = param_and_grad if self._apply_decay_param_fun is", "(float|Tensor, optional): The weight decay coefficient, it can be float", "is False. multi_precision (bool, optional): Whether to use multi-precision during", "input=scaled_param, output=master_weight) else: scaled_param = param * decay_coeff paddle.fluid.layers.assign(input=scaled_param, output=param)", "None. apply_decay_param_fun (function|None, optional): If it is not None, only", "be updated with layerwise learning rate ratio. Otherwise, the learning", "beta1 = paddle.to_tensor([0.9], dtype=\"float32\") beta2 = paddle.to_tensor([0.99], dtype=\"float32\") adam =", "master_weight adamw_op = block.append_op( type=self.type, inputs=inputs, outputs=outputs, attrs=attrs, stop_gradient=True) return", "Version 2.0 (the \"License\"); # you may not use this", "= paddle.optimizer.AdamW( learning_rate=0.1, parameters=[{ 'params': linear_1.parameters() }, { 'params': linear_2.parameters(),", "self._lr_to_coeff.get(learning_rate, None) if decay_coeff is None: # NOTE(wangxi): for pipeline", "will be updated with weight decay. It only works when", "* decay_coeff paddle.fluid.layers.assign(input=scaled_param, output=param) def _append_optimize_op(self, block, param_and_grad): if paddle.is_compiled_with_xpu():", "options for \\ different parameter groups such as the learning", "will be much more faster. But this mode has different", "isinstance(param_and_grad, dict): param_and_grad = self._update_param_group(param_and_grad) param, grad = param_and_grad #", "find_master = (self._multi_precision and param.dtype == core.VarDesc.VarType.FP16) if find_master: master_weight", "value or a LRScheduler. The default value is 0.001. parameters", "can resolves the problem of L2 regularization failure in the", "The weight decay coefficient, it can be float or Tensor.", "to achieve that. 
self._auxiliary_vars = dict() def _set_auxiliary_var(self, key, val):", "by applicable law or agreed to in writing, software #", "= paddle.rand([10,10], dtype=\"float32\") out = linear(inp) loss = paddle.mean(out) beta1", "false. name (str, optional): Normally there is no need for", "else: attrs['epsilon'] = self._epsilon if find_master: inputs[\"MasterParam\"] = master_weight outputs[\"MasterParamOut\"]", "def __str__(self): return \" \".join([\"Weight Decay, params:\", \",\".join(self._params_name)]) def _update_param_group(self,", "the learning_rate of linear_2 is 0.01. linear_1 = paddle.nn.Linear(10, 10)", "super(AdamW, self)._append_optimize_op(block, param_and_grad) assert isinstance(block, framework.Block) if isinstance(param_and_grad, dict): param_and_grad", "= 1.0 - learning_rate * self._coeff self._lr_to_coeff[learning_rate] = decay_coeff find_master", "param_and_grad[0], moment1, moment2, beta1_pow_acc, beta2_pow_acc, 'epsilon', self._epsilon, 'lazy_mode', self._lazy_mode, 'min_row_size_to_use_multithread',", "to specify tensors. Default: None. 
grad_clip (GradientClipBase, optional): Gradient cliping", "if find_master else None) lr = self._create_param_lr(param_and_grad) # create the", "grad = param_and_grad if self._apply_decay_param_fun is not None \\ and", "self._beta1 else: attrs['beta1'] = self._beta1 if isinstance(self._beta2, Variable): inputs['Beta2Tensor'] =", "paddle.to_tensor([0.99], dtype=\"float32\") adam = paddle.optimizer.AdamW(learning_rate=0.1, parameters=linear.parameters(), beta1=beta1, beta2=beta2, weight_decay=0.01) out.backward()", "unimplemented in XPU and NPU\") self._lr_ratio = lr_ratio super(AdamW, self).__init__(", "10) inp = paddle.rand([10,10], dtype=\"float32\") out = linear(inp) loss =", "= { \"Param\": [param_and_grad[0]], \"Grad\": [param_and_grad[1]], \"LearningRate\": [lr], \"Moment1\": [moment1],", "& = t + 1 moment\\_1\\_out & = {\\beta}_1 *", "\"Beta2PowOut\": [beta2_pow_acc], } attrs = { \"lazy_mode\": self._lazy_mode, \"min_row_size_to_use_multithread\": 1000,", "super( AdamW, self)._create_optimization_pass(parameters_and_grads) # In dygraph mode, clear _lr_to_coeff after", "lr Args: block: block in which variable is to be", "out.backward() adam.step() adam.clear_grad() \"\"\" def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8,", "value is 1e-08. weight_decay (float|Tensor, optional): The weight decay coefficient,", "* grad moemnt\\_2\\_out & = {\\beta}_2 * moment\\_2 + (1", "AdamW optimizer is implemented based on the AdamW Optimization in", "_C_ops = core.ops __all__ = [] class AdamW(Adam): r\"\"\" The", "= True if self._apply_decay_param_fun is not None \\ and not", "beta1 in [0,1).\") if not 0 <= beta2 < 1:", "lr_ratio super(AdamW, self).__init__( learning_rate=learning_rate, parameters=parameters, beta1=beta1, beta2=beta2, epsilon=epsilon, grad_clip=grad_clip, name=name,", "for the parameter. 
with_decay = True if self._apply_decay_param_fun is not", "= linear_1(inp) out = linear_2(out) loss = paddle.mean(out) adam =", "- learning\\_rate * (\\frac{moment\\_1}{\\sqrt{moment\\_2} + \\epsilon} + \\lambda * param)", "[moment2], \"Beta1Pow\": [beta1_pow_acc], \"Beta2Pow\": [beta2_pow_acc], } # Pass found_inf to", "(bool, optional): The official Adam algorithm has two moving-average accumulators.", "applicable law or agreed to in writing, software # distributed", "weight_decay=0.01, beta1=0.9) out.backward() adam.step() adam.clear_grad() \"\"\" def __init__(self, learning_rate=0.001, beta1=0.9,", "PaddlePaddle Authors. All Rights Reserved. # # Licensed under the", "} attrs = { \"lazy_mode\": self._lazy_mode, \"min_row_size_to_use_multithread\": 1000, \"multi_precision\": find_master,", "parameters=linear.parameters(), beta1=beta1, beta2=beta2, weight_decay=0.01) out.backward() adam.step() adam.clear_grad() #Note that the", "Variable): inputs['Beta1Tensor'] = self._beta1 else: attrs['beta1'] = self._beta1 if isinstance(self._beta2,", "paddle.mean(out) beta1 = paddle.to_tensor([0.9], dtype=\"float32\") beta2 = paddle.to_tensor([0.99], dtype=\"float32\") adam", "None) lr = self._create_param_lr(param_and_grad) # create the adamw optimize op", "self._master_weights[param.name] scaled_param = master_weight * decay_coeff paddle.fluid.layers.assign( input=scaled_param, output=master_weight) else:", "self._apply_decay_param_fun is not None \\ and not self._apply_decay_param_fun(param.name): with_decay =", "assert learning_rate is not None assert beta1 is not None", "[param_and_grad[1]], \"LearningRate\": [lr], \"Moment1\": [moment1], \"Moment2\": [moment2], \"Beta1Pow\": [beta1_pow_acc], \"Beta2Pow\":", "__init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8, parameters=None, weight_decay=0.01, lr_ratio=None, apply_decay_param_fun=None, grad_clip=None,", "after # optimizer._create_global_learning_rate(). 
learning_rate = self._create_param_lr(param_and_grad) with block.program._optimized_guard( [param, grad]),", "self._lazy_mode, 'min_row_size_to_use_multithread', 1000, 'beta1', _beta1, 'beta2', _beta2, 'coeff', self._coeff, \"lr_ratio\",", "for we must make sure _create_param_lr() be called after #", "then the parameters are list of dict. Note that the", "parameters=parameters, beta1=beta1, beta2=beta2, epsilon=epsilon, grad_clip=grad_clip, name=name, lazy_mode=lazy_mode, multi_precision=multi_precision) self._default_dict =", "decay. Raises: Exception: The type of coeff and parameter is", "None \\ and not self._apply_decay_param_fun(param.name): return if isinstance(self._learning_rate, float): learning_rate", "assert epsilon is not None if not 0 <= beta1", "are updated at every step. Every element of the two", ":ref:`api_guide_Name`. The default value is None. **Notes**: **Currently, AdamW doesn't", "= dict() return optimize_ops def __str__(self): return \" \".join([\"Weight Decay,", "not 0 <= beta1 < 1: raise ValueError(\"Invaild value of", "'lazy_mode', self._lazy_mode, 'min_row_size_to_use_multithread', 1000, 'beta1', _beta1, 'beta2', _beta2, 'coeff', self._coeff,", "# You may obtain a copy of the License at", "beta1=0.9, beta2=0.999, epsilon=1e-8, parameters=None, weight_decay=0.01, lr_ratio=None, apply_decay_param_fun=None, grad_clip=None, lazy_mode=False, multi_precision=False,", "optimization.** Examples: .. code-block:: python import paddle linear = paddle.nn.Linear(10,", "if isinstance(self._epsilon, Variable): inputs['EpsilonTensor'] = self._epsilon else: attrs['epsilon'] = self._epsilon", "that. self._auxiliary_vars = dict() def _set_auxiliary_var(self, key, val): self._auxiliary_vars[key] =", "If it has been calculated, the result will be reused.", "The default value is 0.01. 
lr_ratio (function|None, optional): If it", "= self._master_weights[param.name] scaled_param = master_weight * decay_coeff paddle.fluid.layers.assign( input=scaled_param, output=master_weight)", "be updated. beta1 (float|Tensor, optional): The exponential decay rate for", "will be updated with layerwise learning rate ratio. Otherwise, the", "beta2 is not None assert epsilon is not None if", "step. Every element of the two moving-average is updated in", "def _append_decoupled_weight_decay(self, block, param_and_grad): \"\"\" Add decoupled weight decay op.", "it can resolves the problem of L2 regularization failure in", "mode, at this time all parameters will be updated. beta1", "This parameter is required in dygraph mode. And you can", "to skip update for not only param, but also momentum", "coefficient, it can be float or Tensor. The default value", "makes apply_decay_param_fun(Tensor.name)==True will be updated with weight decay. It only", "`DECOUPLED WEIGHT DECAY REGULARIZATION <https://arxiv.org/pdf/1711.05101.pdf>`_. it can resolves the problem", "this mode has different semantics with the original Adam algorithm", "be reused. # NOTE(wangxi): In dygraph mode, apply_gradient will be", "Every element of the two moving-average is updated in both", "\"License\"); # you may not use this file except in", "reused. # NOTE(wangxi): In dygraph mode, apply_gradient will be executed", "not None \\ and not self._apply_decay_param_fun(param.name): with_decay = False moment1", "None inputs = { \"Param\": [param_and_grad[0]], \"Grad\": [param_and_grad[1]], \"LearningRate\": [lr],", "and sparse mode. If the size of parameter is very", "isinstance(self._beta1, Variable): inputs['Beta1Tensor'] = self._beta1 else: attrs['beta1'] = self._beta1 if", "else self._beta2.numpy().item(0) _, _, _, _, _ = _C_ops.adamw( param_and_grad[0],", "grad = param_and_grad # Whether we should do weight decay", "weight decay coefficient, it can be float or Tensor. 
The", "Variable from ..fluid.dygraph import base as imperative_base from collections import", "else: # NOTE. We add this function to the _append_optimize_op(),", "two moving-average accumulators. The accumulators are updated at every step.", "_lr_to_coeff after applied gradient self._lr_to_coeff = dict() return optimize_ops def", "the 1st moment estimates. It should be a float number", "paddle.static.device_guard(None): decay_coeff = 1.0 - learning_rate * self._coeff self._lr_to_coeff[learning_rate] =", "isinstance(coeff, framework.Variable): raise TypeError(\"coeff should be float or Tensor.\") self._params_name", "= self._learning_rate else: # NOTE. We add this function to", "& = learning\\_rate * \\frac{\\sqrt{1 - {\\beta}_2^t}}{1 - {beta}_1^t} param\\_out", "**Currently, AdamW doesn't support sparse parameter optimization.** Examples: .. code-block::", "paddle.to_tensor([0.9], dtype=\"float32\") beta2 = paddle.to_tensor([0.99], dtype=\"float32\") adam = paddle.optimizer.AdamW(learning_rate=0.1, parameters=linear.parameters(),", "exponential decay rate for the 2nd moment estimates. It should", "dict): param_and_grad = self._update_param_group(param_and_grad) param, grad = param_and_grad # Whether", "self._epsilon else: attrs['epsilon'] = self._epsilon if find_master: inputs[\"MasterParam\"] = master_weight", "The exponential decay rate for the 2nd moment estimates. It" ]
[ "Gene does not exist response = self.app_client.get(\"/interactions/rice/LOC_Os01g52565\") expected = {", "self.app_client.get(\"/interactions/rice/LOC_Os01g52565\") expected = { \"wasSuccessful\": False, \"error\": \"There are no", "from api import app from unittest import TestCase class TestIntegrations(TestCase):", "\"protein_2\": \"LOC_Os01g73310\", \"total_hits\": 1, \"Num_species\": 1, \"Quality\": 1, \"pcc\": -0.116,", "\"error\": \"Invalid species or gene ID\"} self.assertEqual(response.json, expected) # Invalid", "\"protein_2\": \"LOC_Os01g52560\", \"total_hits\": 1, \"Num_species\": 1, \"Quality\": 1, \"pcc\": 0.65,", "species' genes. \"\"\" # Valid request rice response = self.app_client.get(\"/interactions/rice/LOC_Os01g52560\")", "species response = self.app_client.get(\"/interactions/poplar/abc\") expected = {\"wasSuccessful\": False, \"error\": \"Invalid", "class TestIntegrations(TestCase): maxDiff = None def setUp(self): self.app_client = app.test_client()", "\"Num_species\": 1, \"Quality\": 1, \"pcc\": -0.116, }, ], } self.assertEqual(response.json,", "gene ID\"} self.assertEqual(response.json, expected) # Gene does not exist response", "unittest import TestCase class TestIntegrations(TestCase): maxDiff = None def setUp(self):", "api import app from unittest import TestCase class TestIntegrations(TestCase): maxDiff", "}, { \"protein_1\": \"LOC_Os01g52560\", \"protein_2\": \"LOC_Os01g73310\", \"total_hits\": 1, \"Num_species\": 1,", "True, \"data\": [ { \"protein_1\": \"LOC_Os01g01080\", \"protein_2\": \"LOC_Os01g52560\", \"total_hits\": 1,", "# Gene does not exist response = self.app_client.get(\"/interactions/rice/LOC_Os01g52565\") expected =", "not exist response = self.app_client.get(\"/interactions/rice/LOC_Os01g52565\") expected = { \"wasSuccessful\": False,", "\"Invalid species or gene ID\"} self.assertEqual(response.json, expected) # Gene does", "self.assertEqual(response.json, expected) # Gene does not exist response = 
self.app_client.get(\"/interactions/rice/LOC_Os01g52565\")", "= { \"wasSuccessful\": False, \"error\": \"There are no data found", "expected = { \"wasSuccessful\": False, \"error\": \"There are no data", "\"Quality\": 1, \"pcc\": -0.116, }, ], } self.assertEqual(response.json, expected) #", "import TestCase class TestIntegrations(TestCase): maxDiff = None def setUp(self): self.app_client", "retrieving protein interactions for various species' genes. \"\"\" # Valid", "], } self.assertEqual(response.json, expected) # Invalid species response = self.app_client.get(\"/interactions/poplar/abc\")", "expected) # Invalid species response = self.app_client.get(\"/interactions/poplar/abc\") expected = {\"wasSuccessful\":", "self.app_client.get(\"/interactions/rice/abc\") expected = {\"wasSuccessful\": False, \"error\": \"Invalid species or gene", "request rice response = self.app_client.get(\"/interactions/rice/LOC_Os01g52560\") expected = { \"wasSuccessful\": True,", "[ { \"protein_1\": \"LOC_Os01g01080\", \"protein_2\": \"LOC_Os01g52560\", \"total_hits\": 1, \"Num_species\": 1,", "\"wasSuccessful\": True, \"data\": [ { \"protein_1\": \"LOC_Os01g01080\", \"protein_2\": \"LOC_Os01g52560\", \"total_hits\":", "# Invalid gene id response = self.app_client.get(\"/interactions/rice/abc\") expected = {\"wasSuccessful\":", "= self.app_client.get(\"/interactions/rice/LOC_Os01g52560\") expected = { \"wasSuccessful\": True, \"data\": [ {", "\"error\": \"Invalid species or gene ID\"} self.assertEqual(response.json, expected) # Gene", "TestIntegrations(TestCase): maxDiff = None def setUp(self): self.app_client = app.test_client() def", "self.app_client.get(\"/interactions/rice/LOC_Os01g52560\") expected = { \"wasSuccessful\": True, \"data\": [ { \"protein_1\":", "{ \"wasSuccessful\": True, \"data\": [ { \"protein_1\": \"LOC_Os01g01080\", \"protein_2\": \"LOC_Os01g52560\",", "response = self.app_client.get(\"/interactions/rice/LOC_Os01g52560\") expected = { \"wasSuccessful\": True, 
\"data\": [", "}, ], } self.assertEqual(response.json, expected) # Invalid species response =", "or gene ID\"} self.assertEqual(response.json, expected) # Invalid gene id response", "{ \"protein_1\": \"LOC_Os01g52560\", \"protein_2\": \"LOC_Os01g73310\", \"total_hits\": 1, \"Num_species\": 1, \"Quality\":", "def setUp(self): self.app_client = app.test_client() def test_get_itrns(self): \"\"\" This function", "app.test_client() def test_get_itrns(self): \"\"\" This function test retrieving protein interactions", "\"LOC_Os01g52560\", \"protein_2\": \"LOC_Os01g73310\", \"total_hits\": 1, \"Num_species\": 1, \"Quality\": 1, \"pcc\":", "species or gene ID\"} self.assertEqual(response.json, expected) # Invalid gene id", "expected) # Invalid gene id response = self.app_client.get(\"/interactions/rice/abc\") expected =", "rice response = self.app_client.get(\"/interactions/rice/LOC_Os01g52560\") expected = { \"wasSuccessful\": True, \"data\":", "# Invalid species response = self.app_client.get(\"/interactions/poplar/abc\") expected = {\"wasSuccessful\": False,", "ID\"} self.assertEqual(response.json, expected) # Gene does not exist response =", "expected = { \"wasSuccessful\": True, \"data\": [ { \"protein_1\": \"LOC_Os01g01080\",", "interactions for various species' genes. \"\"\" # Valid request rice", "or gene ID\"} self.assertEqual(response.json, expected) # Gene does not exist", "Invalid species response = self.app_client.get(\"/interactions/poplar/abc\") expected = {\"wasSuccessful\": False, \"error\":", "= app.test_client() def test_get_itrns(self): \"\"\" This function test retrieving protein", "setUp(self): self.app_client = app.test_client() def test_get_itrns(self): \"\"\" This function test", "import app from unittest import TestCase class TestIntegrations(TestCase): maxDiff =", "for various species' genes. 
\"\"\" # Valid request rice response", "\"protein_1\": \"LOC_Os01g01080\", \"protein_2\": \"LOC_Os01g52560\", \"total_hits\": 1, \"Num_species\": 1, \"Quality\": 1,", "{\"wasSuccessful\": False, \"error\": \"Invalid species or gene ID\"} self.assertEqual(response.json, expected)", "-0.116, }, ], } self.assertEqual(response.json, expected) # Invalid species response", "gene ID\"} self.assertEqual(response.json, expected) # Invalid gene id response =", "test_get_itrns(self): \"\"\" This function test retrieving protein interactions for various", "self.app_client.get(\"/interactions/poplar/abc\") expected = {\"wasSuccessful\": False, \"error\": \"Invalid species or gene", "= self.app_client.get(\"/interactions/rice/LOC_Os01g52565\") expected = { \"wasSuccessful\": False, \"error\": \"There are", "<reponame>VinLau/BAR_API<gh_stars>1-10 from api import app from unittest import TestCase class", "\"\"\" This function test retrieving protein interactions for various species'", "gene id response = self.app_client.get(\"/interactions/rice/abc\") expected = {\"wasSuccessful\": False, \"error\":", "\"total_hits\": 1, \"Num_species\": 1, \"Quality\": 1, \"pcc\": -0.116, }, ],", "This function test retrieving protein interactions for various species' genes.", "id response = self.app_client.get(\"/interactions/rice/abc\") expected = {\"wasSuccessful\": False, \"error\": \"Invalid", "expected) # Gene does not exist response = self.app_client.get(\"/interactions/rice/LOC_Os01g52565\") expected", "1, \"Num_species\": 1, \"Quality\": 1, \"pcc\": -0.116, }, ], }", "species or gene ID\"} self.assertEqual(response.json, expected) # Gene does not", "app from unittest import TestCase class TestIntegrations(TestCase): maxDiff = None", "} self.assertEqual(response.json, expected) # Invalid species response = self.app_client.get(\"/interactions/poplar/abc\") expected", "= self.app_client.get(\"/interactions/poplar/abc\") expected = {\"wasSuccessful\": False, \"error\": \"Invalid species or", 
"False, \"error\": \"There are no data found for the given", "self.assertEqual(response.json, expected) # Invalid species response = self.app_client.get(\"/interactions/poplar/abc\") expected =", "maxDiff = None def setUp(self): self.app_client = app.test_client() def test_get_itrns(self):", "= self.app_client.get(\"/interactions/rice/abc\") expected = {\"wasSuccessful\": False, \"error\": \"Invalid species or", "1, \"Quality\": 1, \"pcc\": 0.65, }, { \"protein_1\": \"LOC_Os01g52560\", \"protein_2\":", "self.app_client = app.test_client() def test_get_itrns(self): \"\"\" This function test retrieving", "\"total_hits\": 1, \"Num_species\": 1, \"Quality\": 1, \"pcc\": 0.65, }, {", "Invalid gene id response = self.app_client.get(\"/interactions/rice/abc\") expected = {\"wasSuccessful\": False,", "\"error\": \"There are no data found for the given gene\",", "from unittest import TestCase class TestIntegrations(TestCase): maxDiff = None def", "are no data found for the given gene\", } self.assertEqual(response.json,", "\"\"\" # Valid request rice response = self.app_client.get(\"/interactions/rice/LOC_Os01g52560\") expected =", "\"pcc\": -0.116, }, ], } self.assertEqual(response.json, expected) # Invalid species", "\"wasSuccessful\": False, \"error\": \"There are no data found for the", "does not exist response = self.app_client.get(\"/interactions/rice/LOC_Os01g52565\") expected = { \"wasSuccessful\":", "various species' genes. \"\"\" # Valid request rice response =", "\"Num_species\": 1, \"Quality\": 1, \"pcc\": 0.65, }, { \"protein_1\": \"LOC_Os01g52560\",", "{ \"wasSuccessful\": False, \"error\": \"There are no data found for", "test retrieving protein interactions for various species' genes. 
\"\"\" #", "expected = {\"wasSuccessful\": False, \"error\": \"Invalid species or gene ID\"}", "exist response = self.app_client.get(\"/interactions/rice/LOC_Os01g52565\") expected = { \"wasSuccessful\": False, \"error\":", "\"LOC_Os01g01080\", \"protein_2\": \"LOC_Os01g52560\", \"total_hits\": 1, \"Num_species\": 1, \"Quality\": 1, \"pcc\":", "1, \"Quality\": 1, \"pcc\": -0.116, }, ], } self.assertEqual(response.json, expected)", "response = self.app_client.get(\"/interactions/rice/LOC_Os01g52565\") expected = { \"wasSuccessful\": False, \"error\": \"There", "= { \"wasSuccessful\": True, \"data\": [ { \"protein_1\": \"LOC_Os01g01080\", \"protein_2\":", "\"LOC_Os01g52560\", \"total_hits\": 1, \"Num_species\": 1, \"Quality\": 1, \"pcc\": 0.65, },", "0.65, }, { \"protein_1\": \"LOC_Os01g52560\", \"protein_2\": \"LOC_Os01g73310\", \"total_hits\": 1, \"Num_species\":", "\"data\": [ { \"protein_1\": \"LOC_Os01g01080\", \"protein_2\": \"LOC_Os01g52560\", \"total_hits\": 1, \"Num_species\":", "\"There are no data found for the given gene\", }", "Valid request rice response = self.app_client.get(\"/interactions/rice/LOC_Os01g52560\") expected = { \"wasSuccessful\":", "1, \"pcc\": 0.65, }, { \"protein_1\": \"LOC_Os01g52560\", \"protein_2\": \"LOC_Os01g73310\", \"total_hits\":", "1, \"pcc\": -0.116, }, ], } self.assertEqual(response.json, expected) # Invalid", "genes. \"\"\" # Valid request rice response = self.app_client.get(\"/interactions/rice/LOC_Os01g52560\") expected", "False, \"error\": \"Invalid species or gene ID\"} self.assertEqual(response.json, expected) #", "ID\"} self.assertEqual(response.json, expected) # Invalid gene id response = self.app_client.get(\"/interactions/rice/abc\")", "no data found for the given gene\", } self.assertEqual(response.json, expected)", "self.assertEqual(response.json, expected) # Invalid gene id response = self.app_client.get(\"/interactions/rice/abc\") expected", "protein interactions for various species' genes. 
\"\"\" # Valid request", "\"Invalid species or gene ID\"} self.assertEqual(response.json, expected) # Invalid gene", "= None def setUp(self): self.app_client = app.test_client() def test_get_itrns(self): \"\"\"", "\"Quality\": 1, \"pcc\": 0.65, }, { \"protein_1\": \"LOC_Os01g52560\", \"protein_2\": \"LOC_Os01g73310\",", "\"pcc\": 0.65, }, { \"protein_1\": \"LOC_Os01g52560\", \"protein_2\": \"LOC_Os01g73310\", \"total_hits\": 1,", "response = self.app_client.get(\"/interactions/poplar/abc\") expected = {\"wasSuccessful\": False, \"error\": \"Invalid species", "function test retrieving protein interactions for various species' genes. \"\"\"", "response = self.app_client.get(\"/interactions/rice/abc\") expected = {\"wasSuccessful\": False, \"error\": \"Invalid species", "\"LOC_Os01g73310\", \"total_hits\": 1, \"Num_species\": 1, \"Quality\": 1, \"pcc\": -0.116, },", "def test_get_itrns(self): \"\"\" This function test retrieving protein interactions for", "\"protein_1\": \"LOC_Os01g52560\", \"protein_2\": \"LOC_Os01g73310\", \"total_hits\": 1, \"Num_species\": 1, \"Quality\": 1,", "{ \"protein_1\": \"LOC_Os01g01080\", \"protein_2\": \"LOC_Os01g52560\", \"total_hits\": 1, \"Num_species\": 1, \"Quality\":", "TestCase class TestIntegrations(TestCase): maxDiff = None def setUp(self): self.app_client =", "None def setUp(self): self.app_client = app.test_client() def test_get_itrns(self): \"\"\" This", "= {\"wasSuccessful\": False, \"error\": \"Invalid species or gene ID\"} self.assertEqual(response.json,", "# Valid request rice response = self.app_client.get(\"/interactions/rice/LOC_Os01g52560\") expected = {", "1, \"Num_species\": 1, \"Quality\": 1, \"pcc\": 0.65, }, { \"protein_1\":" ]
[ "the multiple values which can be used to make calculations", "'C:\\\\Users\\\\ojwoo\\\\Documents\\\\Warwick\\\\CS261\\\\Coursework\\\\dialogflow-java-client-master\\\\samples\\\\clients\\\\VirtualTradingAssistant\\\\src\\\\main\\\\java\\\\ai\\\\api\\\\examples\\\\fileStore\\\\file.csv' #pathToCSV = '/Users/Michal/Desktop/apache-tomcat-8.5.28/bin/misc/file.csv' pathToCSV = 'C:\\\\apache-tomcat-8.5.28\\\\bin\\\\misc\\\\file.csv' url_builder = []", "#pathToCSV = '/Users/Michal/Downloads/dialogflow-java-client-master2/samples/clients/VirtualTradingAssistant/src/main/java/ai/api/examples/fileStore/file.csv' #pathToCSV = 'C:\\\\Users\\\\ojwoo\\\\Documents\\\\Warwick\\\\CS261\\\\Coursework\\\\dialogflow-java-client-master\\\\samples\\\\clients\\\\VirtualTradingAssistant\\\\src\\\\main\\\\java\\\\ai\\\\api\\\\examples\\\\fileStore\\\\file.csv' #pathToCSV = '/Users/Michal/Desktop/apache-tomcat-8.5.28/bin/misc/file.csv' pathToCSV", "data in csv format: # # [date, open, high, low,", "start_date, end_date): #pathToCSV = '/Users/Michal/Downloads/dialogflow-java-client-master2/samples/clients/VirtualTradingAssistant/src/main/java/ai/api/examples/fileStore/file.csv' #pathToCSV = 'C:\\\\Users\\\\ojwoo\\\\Documents\\\\Warwick\\\\CS261\\\\Coursework\\\\dialogflow-java-client-master\\\\samples\\\\clients\\\\VirtualTradingAssistant\\\\src\\\\main\\\\java\\\\ai\\\\api\\\\examples\\\\fileStore\\\\file.csv' #pathToCSV =", "BeautifulSoup # If start date and end date is the", "returned and # if not the multiple values which can", "#pathToCSV = 'C:\\\\Users\\\\ojwoo\\\\Documents\\\\Warwick\\\\CS261\\\\Coursework\\\\dialogflow-java-client-master\\\\samples\\\\clients\\\\VirtualTradingAssistant\\\\src\\\\main\\\\java\\\\ai\\\\api\\\\examples\\\\fileStore\\\\file.csv' #pathToCSV = '/Users/Michal/Desktop/apache-tomcat-8.5.28/bin/misc/file.csv' pathToCSV = 'C:\\\\apache-tomcat-8.5.28\\\\bin\\\\misc\\\\file.csv' url_builder", "= ''.join(arr) response = urllib2.urlopen(link) cr = 
csv.reader(response) with open(pathToCSV,", "ticker (company symbol) # interval (d (daily), m (monthly), q", "url_builder.append(ticker) url_builder.append('&c=0&d1=') url_builder.append(start_date) url_builder.append('&d2=') url_builder.append(end_date) url_builder.append('&i=') url_builder.append(interval) url = ''.join(url_builder)", "urllib2 from bs4 import BeautifulSoup # If start date and", "import BeautifulSoup # If start date and end date is", "same only one value will be returned and # if", "end_date (YYYYMMDD) def get_historical_data(ticker, interval, start_date, end_date): #pathToCSV = '/Users/Michal/Downloads/dialogflow-java-client-master2/samples/clients/VirtualTradingAssistant/src/main/java/ai/api/examples/fileStore/file.csv'", "BeautifulSoup4 before running # # prints out historical data in", "be used to make calculations # # ticker (company symbol)", "'w') as csvfile: wr = csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerows(cr) def", "import re, csv, sys, urllib2 from bs4 import BeautifulSoup #", "(d (daily), m (monthly), q (quarterly), y (yearly)) # start_date", "link = link.group(1) except AttributeError: with open(pathToCSV, 'w') as csvfile:", "# # [date, open, high, low, close, volume] # import", "re, csv, sys, urllib2 from bs4 import BeautifulSoup # If", "'html.parser') link = soup.findAll('a', href=re.compile('^q/d/l/')) link = re.search('\"(.*)\"', str(link)) try:", "def get_historical_data(ticker, interval, start_date, end_date): #pathToCSV = '/Users/Michal/Downloads/dialogflow-java-client-master2/samples/clients/VirtualTradingAssistant/src/main/java/ai/api/examples/fileStore/file.csv' #pathToCSV =", "# start_date (YYYYMMDD) # end_date (YYYYMMDD) def get_historical_data(ticker, interval, start_date,", "# If start date and end date is the same", "= link.group(1) except AttributeError: with open(pathToCSV, 'w') as csvfile: wr", "date is the same only one value will be returned", "open(pathToCSV, 'w') as csvfile: wr = 
csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerow('')", "sys.argv get_historical_data(args[1], args[2], args[3], args[4]) if __name__ == '__main__': main()", "make calculations # # ticker (company symbol) # interval (d", "url_builder.append('&d2=') url_builder.append(end_date) url_builder.append('&i=') url_builder.append(interval) url = ''.join(url_builder) page = urllib2.urlopen(url)", "get_historical_data(ticker, interval, start_date, end_date): #pathToCSV = '/Users/Michal/Downloads/dialogflow-java-client-master2/samples/clients/VirtualTradingAssistant/src/main/java/ai/api/examples/fileStore/file.csv' #pathToCSV = 'C:\\\\Users\\\\ojwoo\\\\Documents\\\\Warwick\\\\CS261\\\\Coursework\\\\dialogflow-java-client-master\\\\samples\\\\clients\\\\VirtualTradingAssistant\\\\src\\\\main\\\\java\\\\ai\\\\api\\\\examples\\\\fileStore\\\\file.csv'", "csvfile: wr = csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerows(cr) def main(): args", "wr = csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerows(cr) def main(): args =", "# interval (d (daily), m (monthly), q (quarterly), y (yearly))", "= link.replace('amp;', '') arr = [] arr.append('https://stooq.com/') arr.append(link) link =", "if not the multiple values which can be used to", "url_builder.append('https://stooq.com/q/d/?s=') url_builder.append(ticker) url_builder.append('&c=0&d1=') url_builder.append(start_date) url_builder.append('&d2=') url_builder.append(end_date) url_builder.append('&i=') url_builder.append(interval) url =", "If start date and end date is the same only", "= '/Users/Michal/Desktop/apache-tomcat-8.5.28/bin/misc/file.csv' pathToCSV = 'C:\\\\apache-tomcat-8.5.28\\\\bin\\\\misc\\\\file.csv' url_builder = [] url_builder.append('https://stooq.com/q/d/?s=') url_builder.append(ticker)", "= csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerow('') exit() link = link.replace('amp;', '')", "calculations # # ticker (company symbol) # interval (d (daily),", "main(): args = 
sys.argv get_historical_data(args[1], args[2], args[3], args[4]) if __name__", "cr = csv.reader(response) with open(pathToCSV, 'w') as csvfile: wr =", "'/Users/Michal/Desktop/apache-tomcat-8.5.28/bin/misc/file.csv' pathToCSV = 'C:\\\\apache-tomcat-8.5.28\\\\bin\\\\misc\\\\file.csv' url_builder = [] url_builder.append('https://stooq.com/q/d/?s=') url_builder.append(ticker) url_builder.append('&c=0&d1=')", "q (quarterly), y (yearly)) # start_date (YYYYMMDD) # end_date (YYYYMMDD)", "soup.findAll('a', href=re.compile('^q/d/l/')) link = re.search('\"(.*)\"', str(link)) try: link = link.group(1)", "arr.append('https://stooq.com/') arr.append(link) link = ''.join(arr) response = urllib2.urlopen(link) cr =", "# prints out historical data in csv format: # #", "# # prints out historical data in csv format: #", "csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerow('') exit() link = link.replace('amp;', '') arr", "sys, urllib2 from bs4 import BeautifulSoup # If start date", "low, close, volume] # import re, csv, sys, urllib2 from", "to make calculations # # ticker (company symbol) # interval", "(YYYYMMDD) # end_date (YYYYMMDD) def get_historical_data(ticker, interval, start_date, end_date): #pathToCSV", "args = sys.argv get_historical_data(args[1], args[2], args[3], args[4]) if __name__ ==", "quotechar='#') wr.writerows(cr) def main(): args = sys.argv get_historical_data(args[1], args[2], args[3],", "running # # prints out historical data in csv format:", "csvfile: wr = csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerow('') exit() link =", "wr = csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerow('') exit() link = link.replace('amp;',", "link.replace('amp;', '') arr = [] arr.append('https://stooq.com/') arr.append(link) link = ''.join(arr)", "date and end date is the same only one value", "try: link = link.group(1) except AttributeError: with open(pathToCSV, 'w') as", "AttributeError: with open(pathToCSV, 'w') as csvfile: wr = csv.writer(csvfile, 
delimiter='@',", "arr.append(link) link = ''.join(arr) response = urllib2.urlopen(link) cr = csv.reader(response)", "= BeautifulSoup(page, 'html.parser') link = soup.findAll('a', href=re.compile('^q/d/l/')) link = re.search('\"(.*)\"',", "= 'C:\\\\apache-tomcat-8.5.28\\\\bin\\\\misc\\\\file.csv' url_builder = [] url_builder.append('https://stooq.com/q/d/?s=') url_builder.append(ticker) url_builder.append('&c=0&d1=') url_builder.append(start_date) url_builder.append('&d2=')", "= [] arr.append('https://stooq.com/') arr.append(link) link = ''.join(arr) response = urllib2.urlopen(link)", "[] url_builder.append('https://stooq.com/q/d/?s=') url_builder.append(ticker) url_builder.append('&c=0&d1=') url_builder.append(start_date) url_builder.append('&d2=') url_builder.append(end_date) url_builder.append('&i=') url_builder.append(interval) url", "link = ''.join(arr) response = urllib2.urlopen(link) cr = csv.reader(response) with", "with open(pathToCSV, 'w') as csvfile: wr = csv.writer(csvfile, delimiter='@', quotechar='#')", "multiple values which can be used to make calculations #", "# [date, open, high, low, close, volume] # import re,", "used to make calculations # # ticker (company symbol) #", "re.search('\"(.*)\"', str(link)) try: link = link.group(1) except AttributeError: with open(pathToCSV,", "open(pathToCSV, 'w') as csvfile: wr = csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerows(cr)", "def main(): args = sys.argv get_historical_data(args[1], args[2], args[3], args[4]) if", "# # ticker (company symbol) # interval (d (daily), m", "only one value will be returned and # if not", "= sys.argv get_historical_data(args[1], args[2], args[3], args[4]) if __name__ == '__main__':", "'w') as csvfile: wr = csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerow('') exit()", "as csvfile: wr = csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerow('') exit() link", "csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerows(cr) def main(): args = 
sys.argv get_historical_data(args[1],", "csv format: # # [date, open, high, low, close, volume]", "(quarterly), y (yearly)) # start_date (YYYYMMDD) # end_date (YYYYMMDD) def", "will be returned and # if not the multiple values", "(YYYYMMDD) def get_historical_data(ticker, interval, start_date, end_date): #pathToCSV = '/Users/Michal/Downloads/dialogflow-java-client-master2/samples/clients/VirtualTradingAssistant/src/main/java/ai/api/examples/fileStore/file.csv' #pathToCSV", "'') arr = [] arr.append('https://stooq.com/') arr.append(link) link = ''.join(arr) response", "values which can be used to make calculations # #", "link = re.search('\"(.*)\"', str(link)) try: link = link.group(1) except AttributeError:", "exit() link = link.replace('amp;', '') arr = [] arr.append('https://stooq.com/') arr.append(link)", "= csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerows(cr) def main(): args = sys.argv", "and end date is the same only one value will", "# if not the multiple values which can be used", "urllib2.urlopen(link) cr = csv.reader(response) with open(pathToCSV, 'w') as csvfile: wr", "open, high, low, close, volume] # import re, csv, sys,", "response = urllib2.urlopen(link) cr = csv.reader(response) with open(pathToCSV, 'w') as", "historical data in csv format: # # [date, open, high,", "not the multiple values which can be used to make", "interval (d (daily), m (monthly), q (quarterly), y (yearly)) #", "urllib2.urlopen(url) soup = BeautifulSoup(page, 'html.parser') link = soup.findAll('a', href=re.compile('^q/d/l/')) link", "url_builder.append('&c=0&d1=') url_builder.append(start_date) url_builder.append('&d2=') url_builder.append(end_date) url_builder.append('&i=') url_builder.append(interval) url = ''.join(url_builder) page", "symbol) # interval (d (daily), m (monthly), q (quarterly), y", "'C:\\\\apache-tomcat-8.5.28\\\\bin\\\\misc\\\\file.csv' url_builder = [] url_builder.append('https://stooq.com/q/d/?s=') url_builder.append(ticker) 
url_builder.append('&c=0&d1=') url_builder.append(start_date) url_builder.append('&d2=') url_builder.append(end_date)", "from bs4 import BeautifulSoup # If start date and end", "[date, open, high, low, close, volume] # import re, csv,", "= 'C:\\\\Users\\\\ojwoo\\\\Documents\\\\Warwick\\\\CS261\\\\Coursework\\\\dialogflow-java-client-master\\\\samples\\\\clients\\\\VirtualTradingAssistant\\\\src\\\\main\\\\java\\\\ai\\\\api\\\\examples\\\\fileStore\\\\file.csv' #pathToCSV = '/Users/Michal/Desktop/apache-tomcat-8.5.28/bin/misc/file.csv' pathToCSV = 'C:\\\\apache-tomcat-8.5.28\\\\bin\\\\misc\\\\file.csv' url_builder =", "value will be returned and # if not the multiple", "# end_date (YYYYMMDD) def get_historical_data(ticker, interval, start_date, end_date): #pathToCSV =", "(yearly)) # start_date (YYYYMMDD) # end_date (YYYYMMDD) def get_historical_data(ticker, interval,", "format: # # [date, open, high, low, close, volume] #", "volume] # import re, csv, sys, urllib2 from bs4 import", "= '/Users/Michal/Downloads/dialogflow-java-client-master2/samples/clients/VirtualTradingAssistant/src/main/java/ai/api/examples/fileStore/file.csv' #pathToCSV = 'C:\\\\Users\\\\ojwoo\\\\Documents\\\\Warwick\\\\CS261\\\\Coursework\\\\dialogflow-java-client-master\\\\samples\\\\clients\\\\VirtualTradingAssistant\\\\src\\\\main\\\\java\\\\ai\\\\api\\\\examples\\\\fileStore\\\\file.csv' #pathToCSV = '/Users/Michal/Desktop/apache-tomcat-8.5.28/bin/misc/file.csv' pathToCSV =", "str(link)) try: link = link.group(1) except AttributeError: with open(pathToCSV, 'w')", "''.join(arr) response = urllib2.urlopen(link) cr = csv.reader(response) with open(pathToCSV, 'w')", "delimiter='@', quotechar='#') wr.writerows(cr) def main(): args = sys.argv get_historical_data(args[1], args[2],", "link = soup.findAll('a', href=re.compile('^q/d/l/')) link = re.search('\"(.*)\"', str(link)) try: link", "BeautifulSoup(page, 'html.parser') link = soup.findAll('a', href=re.compile('^q/d/l/')) link = 
re.search('\"(.*)\"', str(link))", "except AttributeError: with open(pathToCSV, 'w') as csvfile: wr = csv.writer(csvfile,", "delimiter='@', quotechar='#') wr.writerow('') exit() link = link.replace('amp;', '') arr =", "(company symbol) # interval (d (daily), m (monthly), q (quarterly),", "quotechar='#') wr.writerow('') exit() link = link.replace('amp;', '') arr = []", "which can be used to make calculations # # ticker", "'/Users/Michal/Downloads/dialogflow-java-client-master2/samples/clients/VirtualTradingAssistant/src/main/java/ai/api/examples/fileStore/file.csv' #pathToCSV = 'C:\\\\Users\\\\ojwoo\\\\Documents\\\\Warwick\\\\CS261\\\\Coursework\\\\dialogflow-java-client-master\\\\samples\\\\clients\\\\VirtualTradingAssistant\\\\src\\\\main\\\\java\\\\ai\\\\api\\\\examples\\\\fileStore\\\\file.csv' #pathToCSV = '/Users/Michal/Desktop/apache-tomcat-8.5.28/bin/misc/file.csv' pathToCSV = 'C:\\\\apache-tomcat-8.5.28\\\\bin\\\\misc\\\\file.csv'", "(daily), m (monthly), q (quarterly), y (yearly)) # start_date (YYYYMMDD)", "can be used to make calculations # # ticker (company", "# install BeautifulSoup4 before running # # prints out historical", "= [] url_builder.append('https://stooq.com/q/d/?s=') url_builder.append(ticker) url_builder.append('&c=0&d1=') url_builder.append(start_date) url_builder.append('&d2=') url_builder.append(end_date) url_builder.append('&i=') url_builder.append(interval)", "arr = [] arr.append('https://stooq.com/') arr.append(link) link = ''.join(arr) response =", "in csv format: # # [date, open, high, low, close,", "is the same only one value will be returned and", "bs4 import BeautifulSoup # If start date and end date", "wr.writerow('') exit() link = link.replace('amp;', '') arr = [] arr.append('https://stooq.com/')", "= csv.reader(response) with open(pathToCSV, 'w') as csvfile: wr = csv.writer(csvfile,", "= urllib2.urlopen(url) soup = BeautifulSoup(page, 'html.parser') link = soup.findAll('a', href=re.compile('^q/d/l/'))", 
"url_builder.append(start_date) url_builder.append('&d2=') url_builder.append(end_date) url_builder.append('&i=') url_builder.append(interval) url = ''.join(url_builder) page =", "m (monthly), q (quarterly), y (yearly)) # start_date (YYYYMMDD) #", "#pathToCSV = '/Users/Michal/Desktop/apache-tomcat-8.5.28/bin/misc/file.csv' pathToCSV = 'C:\\\\apache-tomcat-8.5.28\\\\bin\\\\misc\\\\file.csv' url_builder = [] url_builder.append('https://stooq.com/q/d/?s=')", "page = urllib2.urlopen(url) soup = BeautifulSoup(page, 'html.parser') link = soup.findAll('a',", "link = link.replace('amp;', '') arr = [] arr.append('https://stooq.com/') arr.append(link) link", "url_builder.append(end_date) url_builder.append('&i=') url_builder.append(interval) url = ''.join(url_builder) page = urllib2.urlopen(url) soup", "end_date): #pathToCSV = '/Users/Michal/Downloads/dialogflow-java-client-master2/samples/clients/VirtualTradingAssistant/src/main/java/ai/api/examples/fileStore/file.csv' #pathToCSV = 'C:\\\\Users\\\\ojwoo\\\\Documents\\\\Warwick\\\\CS261\\\\Coursework\\\\dialogflow-java-client-master\\\\samples\\\\clients\\\\VirtualTradingAssistant\\\\src\\\\main\\\\java\\\\ai\\\\api\\\\examples\\\\fileStore\\\\file.csv' #pathToCSV = '/Users/Michal/Desktop/apache-tomcat-8.5.28/bin/misc/file.csv'", "(monthly), q (quarterly), y (yearly)) # start_date (YYYYMMDD) # end_date", "start date and end date is the same only one", "''.join(url_builder) page = urllib2.urlopen(url) soup = BeautifulSoup(page, 'html.parser') link =", "high, low, close, volume] # import re, csv, sys, urllib2", "csv.reader(response) with open(pathToCSV, 'w') as csvfile: wr = csv.writer(csvfile, delimiter='@',", "the same only one value will be returned and #", "interval, start_date, end_date): #pathToCSV = '/Users/Michal/Downloads/dialogflow-java-client-master2/samples/clients/VirtualTradingAssistant/src/main/java/ai/api/examples/fileStore/file.csv' #pathToCSV = 
'C:\\\\Users\\\\ojwoo\\\\Documents\\\\Warwick\\\\CS261\\\\Coursework\\\\dialogflow-java-client-master\\\\samples\\\\clients\\\\VirtualTradingAssistant\\\\src\\\\main\\\\java\\\\ai\\\\api\\\\examples\\\\fileStore\\\\file.csv' #pathToCSV", "link.group(1) except AttributeError: with open(pathToCSV, 'w') as csvfile: wr =", "url_builder = [] url_builder.append('https://stooq.com/q/d/?s=') url_builder.append(ticker) url_builder.append('&c=0&d1=') url_builder.append(start_date) url_builder.append('&d2=') url_builder.append(end_date) url_builder.append('&i=')", "one value will be returned and # if not the", "end date is the same only one value will be", "prints out historical data in csv format: # # [date,", "wr.writerows(cr) def main(): args = sys.argv get_historical_data(args[1], args[2], args[3], args[4])", "[] arr.append('https://stooq.com/') arr.append(link) link = ''.join(arr) response = urllib2.urlopen(link) cr", "= soup.findAll('a', href=re.compile('^q/d/l/')) link = re.search('\"(.*)\"', str(link)) try: link =", "y (yearly)) # start_date (YYYYMMDD) # end_date (YYYYMMDD) def get_historical_data(ticker,", "pathToCSV = 'C:\\\\apache-tomcat-8.5.28\\\\bin\\\\misc\\\\file.csv' url_builder = [] url_builder.append('https://stooq.com/q/d/?s=') url_builder.append(ticker) url_builder.append('&c=0&d1=') url_builder.append(start_date)", "url_builder.append(interval) url = ''.join(url_builder) page = urllib2.urlopen(url) soup = BeautifulSoup(page,", "= re.search('\"(.*)\"', str(link)) try: link = link.group(1) except AttributeError: with", "url_builder.append('&i=') url_builder.append(interval) url = ''.join(url_builder) page = urllib2.urlopen(url) soup =", "url = ''.join(url_builder) page = urllib2.urlopen(url) soup = BeautifulSoup(page, 'html.parser')", "as csvfile: wr = csv.writer(csvfile, delimiter='@', quotechar='#') wr.writerows(cr) def main():", "be returned and # if not the multiple values which", "= urllib2.urlopen(link) cr = csv.reader(response) with open(pathToCSV, 
'w') as csvfile:", "and # if not the multiple values which can be", "start_date (YYYYMMDD) # end_date (YYYYMMDD) def get_historical_data(ticker, interval, start_date, end_date):", "# ticker (company symbol) # interval (d (daily), m (monthly),", "close, volume] # import re, csv, sys, urllib2 from bs4", "before running # # prints out historical data in csv", "soup = BeautifulSoup(page, 'html.parser') link = soup.findAll('a', href=re.compile('^q/d/l/')) link =", "href=re.compile('^q/d/l/')) link = re.search('\"(.*)\"', str(link)) try: link = link.group(1) except", "install BeautifulSoup4 before running # # prints out historical data", "= ''.join(url_builder) page = urllib2.urlopen(url) soup = BeautifulSoup(page, 'html.parser') link", "csv, sys, urllib2 from bs4 import BeautifulSoup # If start", "# import re, csv, sys, urllib2 from bs4 import BeautifulSoup", "out historical data in csv format: # # [date, open," ]
[ "message[0] def check_connection(fun): def inner(self, *args, **kwargs): if self.state ==", "= ('Time left', 'Temp 1', 'Temp 2', 'Off Goal', 'Temp", "def __str__(self): return \"\\t\".join( map(str, (self.countdown, self.t1, self.t2, self.dg, self.dt,", "oven_status(self): self.conn.write(b'r') @check_connection def oven_query_config(self): self.conn.write(b'q') def disconnect(self): self.state =", "STATE_DISCONNECTED = 127 # can't connect to serial HB_CYCLE =", "9600, timeout=0.05) # empty buffer while len(self.conn.read(1)) > 0: pass", "self.conn.write(b't') @check_connection def oven_status(self): self.conn.write(b'r') @check_connection def oven_query_config(self): self.conn.write(b'q') def", "len(self.conn.read(1)) > 0: pass self.state = 'connected' sleep(0.01) self.oven_query_config() sleep(0.2)", "class Client(threading.Thread): \"\"\" Client class for hotbox serial connection \"\"\"", "'dg', 'dt', 'part', 'state', 'cycle', 'time', 'goal') def __init__(self, message):", "Client(threading.Thread): \"\"\" Client class for hotbox serial connection \"\"\" parsers", "from time import sleep import queue import threading import serial", "buffer += c if parsed_length == msg_length: data = self.parsers[mtype](buffer)", "c[0] msg_length = MSG_LENGTHS[mtype] buffer = bytes() continue if parsed_length", "MSG_LENGTHS = {MSG_RUN_STATUS: 20, MSG_CONFIG: 9, MSG_STATUS: 5} STATE_START =", "MSG_CONFIG = 2 MSG_STATUS = 3 MSG_LENGTHS = {MSG_RUN_STATUS: 20,", "self.running: # Don't do anything if disconnected if (self.state ==", "== 'disconnected'): sleep(0.1) continue try: c = self.conn.read(1) except SerialException:", "self.state, self.dg, self.dt, self.time, self.goal, ) = struct.unpack('=BBLBB?bbLB', message) def", "\"connected\": try: fun(self, *args, **kwargs) except SerialException: self.disconnect() # workaround", "Goal', 'Temp Change', 'Duty cycle (/30)', 'Heating', 'Cycle', 'Total time',", "any other case this is a data byte parsed_length +=", "as e: 
self.disconnect() finally: self.start_message = 0 def run(self): self.running", "continue if parsed_length < 3: # Abort if not a", "self.dg, self.dt, self.time, self.goal, ) = struct.unpack('=BBLBB?bbLB', message) def __str__(self):", "+= 1 buffer += c if parsed_length == msg_length: data", "message): (self.t1, self.t2, self.countdown, self.part, self.cycle, self.state, self.dg, self.dt, self.time,", "import io from collections import defaultdict import struct from time", "4 STATE_INIT = 5 STATE_DISCONNECTED = 127 # can't connect", "msg_length = 0 @check_connection def oven_configure(self, ctime, temp): self.conn.write(b'c'+struct.pack('=LB', ctime,", "0 @check_connection def oven_configure(self, ctime, temp): self.conn.write(b'c'+struct.pack('=LB', ctime, temp)) @check_connection", "*args, **kwargs) except SerialException: self.disconnect() # workaround for bug in", "= ('countdown', 't1', 't2', 'dg', 'dt', 'part', 'state', 'cycle', 'time',", "pyserial # http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e: self.disconnect() finally: self.start_message", "self.state = 'disconnected' self.msg_queue = {MSG_STATUS: queue.Queue(), MSG_CONFIG: queue.Queue(), MSG_RUN_STATUS:", "self.countdown, self.part, self.cycle, self.state, self.dg, self.dt, self.time, self.goal, ) =", "this is the message type byte if parsed_length == 3:", "# can't connect to serial HB_CYCLE = 30 class RunStatus:", "parsed_length == 3: parsed_length += 1 if c[0] == 0:", "(self.countdown, self.t1, self.t2, self.dg, self.dt, self.part, \"On\" if self.state else", "anything if disconnected if (self.state == 'disconnected'): sleep(0.1) continue try:", "temp') MSG_RUN_STATUS = 1 MSG_CONFIG = 2 MSG_STATUS = 3", "SerialException: self.disconnect() # workaround for bug in pyserial # http://sourceforge.net/p/pyserial/patches/37/", "self.conn.write(b's') @check_connection def oven_stop(self): self.conn.write(b't') @check_connection def oven_status(self): self.conn.write(b'r') 
@check_connection", "= 0 @check_connection def oven_configure(self, ctime, temp): self.conn.write(b'c'+struct.pack('=LB', ctime, temp))", "__slots__ = ('temp', 'time') def __init__(self, message): (self.time, self.temp) =", "= 0 msg_length = 0 @check_connection def oven_configure(self, ctime, temp):", "(/30)', 'Heating', 'Cycle', 'Total time', 'Goal temp') MSG_RUN_STATUS = 1", "self.dg, self.dt, self.part, \"On\" if self.state else \"Off\", self.state, self.cycle,", "sleep import queue import threading import serial from serial import", "\"\"\" parsers = { MSG_STATUS: OvenStatus, MSG_RUN_STATUS: RunStatus, MSG_CONFIG: OvenConfig,", "except SerialException: self.disconnect() continue # workaround for bug in pyserial", "byte parsed_length += 1 buffer += c if parsed_length ==", "= struct.unpack('=BBLBB?bbLB', message) def __str__(self): return \"\\t\".join( map(str, (self.countdown, self.t1,", "c = self.conn.read(1) except SerialException: self.disconnect() continue # workaround for", "temp)) @check_connection def oven_start(self): self.conn.write(b's') @check_connection def oven_stop(self): self.conn.write(b't') @check_connection", "serial connection \"\"\" parsers = { MSG_STATUS: OvenStatus, MSG_RUN_STATUS: RunStatus,", "\"On\" if self.state else \"Off\", self.state, self.cycle, self.time, self.goal, )", "self.oven_query_config() sleep(0.2) self.oven_status() except SerialException: self.disconnect() # workaround for bug", "as e: self.disconnect() continue # wait for message if not", "self.state else \"Off\", self.state, self.cycle, self.time, self.goal, ) )) class", ")) class OvenConfig: __slots__ = ('temp', 'time') def __init__(self, message):", "mtype = 0 msg_length = 0 @check_connection def oven_configure(self, ctime,", "= MSG_LENGTHS[mtype] buffer = bytes() continue if parsed_length < 3:", "ctime, temp)) @check_connection def oven_start(self): self.conn.write(b's') @check_connection def oven_stop(self): self.conn.write(b't')", "'time', 'goal') def 
__init__(self, message): (self.t1, self.t2, self.countdown, self.part, self.cycle,", "{ MSG_STATUS: OvenStatus, MSG_RUN_STATUS: RunStatus, MSG_CONFIG: OvenConfig, } def __init__(self):", "c[0]: parsed_length = 0 continue # otherwise increment parsed length", "parsed_length += 1 if c[0] == 0: continue mtype =", "try: fun(self, *args, **kwargs) except SerialException: self.disconnect() # workaround for", "data byte parsed_length += 1 buffer += c if parsed_length", "do anything if disconnected if (self.state == 'disconnected'): sleep(0.1) continue", "= bytes() continue if parsed_length < 3: # Abort if", "0 mtype = 0 msg_length = 0 while self.running: #", "check_connection(fun): def inner(self, *args, **kwargs): if self.state == \"connected\": try:", "STATE_READY = 3 STATE_BOOT = 4 STATE_INIT = 5 STATE_DISCONNECTED", "*args, **kwargs): if self.state == \"connected\": try: fun(self, *args, **kwargs)", "c: continue # this is the message type byte if", "else \"Off\", self.state, self.cycle, self.time, self.goal, ) )) class OvenConfig:", "MSG_RUN_STATUS: queue.Queue(), } def connect(self, port): try: self.conn = serial.Serial(port,", "self.t1, self.t2, self.dg, self.dt, self.part, \"On\" if self.state else \"Off\",", "'disconnected'): sleep(0.1) continue try: c = self.conn.read(1) except SerialException: self.disconnect()", "import struct from time import sleep import queue import threading", "for message if not c: continue # this is the", "self.part, self.cycle, self.state, self.dg, self.dt, self.time, self.goal, ) = struct.unpack('=BBLBB?bbLB',", "1 continue # in any other case this is a", "== \"connected\": try: fun(self, *args, **kwargs) except SerialException: self.disconnect() #", "def __init__(self, message): self.status = message[0] def check_connection(fun): def inner(self,", "serial from serial import SerialException RUN_LABELS = ('Time left', 'Temp", "import threading import serial from serial import SerialException RUN_LABELS =", "'goal') def __init__(self, 
message): (self.t1, self.t2, self.countdown, self.part, self.cycle, self.state,", "byte if c[0]: parsed_length = 0 continue # otherwise increment", "OvenConfig: __slots__ = ('temp', 'time') def __init__(self, message): (self.time, self.temp)", "in pyserial # http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e: self.disconnect() finally:", "a data byte parsed_length += 1 buffer += c if", "= {MSG_STATUS: queue.Queue(), MSG_CONFIG: queue.Queue(), MSG_RUN_STATUS: queue.Queue(), } def connect(self,", "STATE_ACTIVE = 2 STATE_READY = 3 STATE_BOOT = 4 STATE_INIT", "in any other case this is a data byte parsed_length", "return \"\\t\".join( map(str, (self.countdown, self.t1, self.t2, self.dg, self.dt, self.part, \"On\"", "0 msg_length = 0 @check_connection def oven_configure(self, ctime, temp): self.conn.write(b'c'+struct.pack('=LB',", "continue # this is the message type byte if parsed_length", "# Don't do anything if disconnected if (self.state == 'disconnected'):", "0 mtype = 0 msg_length = 0 @check_connection def oven_configure(self,", "MSG_CONFIG: queue.Queue(), MSG_RUN_STATUS: queue.Queue(), } def connect(self, port): try: self.conn", "from collections import defaultdict import struct from time import sleep", "('countdown', 't1', 't2', 'dg', 'dt', 'part', 'state', 'cycle', 'time', 'goal')", "as e: self.disconnect() return inner class Client(threading.Thread): \"\"\" Client class", "2', 'Off Goal', 'Temp Change', 'Duty cycle (/30)', 'Heating', 'Cycle',", "import SerialException RUN_LABELS = ('Time left', 'Temp 1', 'Temp 2',", "0 def run(self): self.running = 1 parsed_length = 0 mtype", "class for hotbox serial connection \"\"\" parsers = { MSG_STATUS:", "self.disconnect() continue # wait for message if not c: continue", "sleep(0.1) continue try: c = self.conn.read(1) except SerialException: self.disconnect() continue", "'t1', 't2', 'dg', 'dt', 'part', 'state', 'cycle', 'time', 'goal') def", "def oven_configure(self, ctime, temp): 
self.conn.write(b'c'+struct.pack('=LB', ctime, temp)) @check_connection def oven_start(self):", "if (self.state == 'disconnected'): sleep(0.1) continue try: c = self.conn.read(1)", "= 5 STATE_DISCONNECTED = 127 # can't connect to serial", "serial.Serial(port, 9600, timeout=0.05) # empty buffer while len(self.conn.read(1)) > 0:", "+= 1 continue # in any other case this is", "TypeError as e: self.disconnect() finally: self.start_message = 0 def run(self):", "5 STATE_DISCONNECTED = 127 # can't connect to serial HB_CYCLE", "0: continue mtype = c[0] msg_length = MSG_LENGTHS[mtype] buffer =", "msg_length: data = self.parsers[mtype](buffer) self.msg_queue[mtype].put(data) parsed_length = 0 mtype =", "# workaround for bug in pyserial # http://sourceforge.net/p/pyserial/patches/37/ except TypeError", "'connected' sleep(0.01) self.oven_query_config() sleep(0.2) self.oven_status() except SerialException: self.disconnect() # workaround", "finally: self.start_message = 0 def run(self): self.running = 1 parsed_length", "temp): self.conn.write(b'c'+struct.pack('=LB', ctime, temp)) @check_connection def oven_start(self): self.conn.write(b's') @check_connection def", ") = struct.unpack('=BBLBB?bbLB', message) def __str__(self): return \"\\t\".join( map(str, (self.countdown,", "2 STATE_READY = 3 STATE_BOOT = 4 STATE_INIT = 5", "fun(self, *args, **kwargs) except SerialException: self.disconnect() # workaround for bug", "not c: continue # this is the message type byte", "buffer while len(self.conn.read(1)) > 0: pass self.state = 'connected' sleep(0.01)", "in pyserial # http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e: self.disconnect() return", "__init__(self): super().__init__() self.state = 'disconnected' self.msg_queue = {MSG_STATUS: queue.Queue(), MSG_CONFIG:", "MSG_CONFIG: OvenConfig, } def __init__(self): super().__init__() self.state = 'disconnected' self.msg_queue", "if not c: continue # this is the message type", "'time') def __init__(self, message): 
(self.time, self.temp) = struct.unpack('=LB', message) class", "message) class OvenStatus: __slots__ = ('status',) def __init__(self, message): self.status", "MSG_LENGTHS[mtype] buffer = bytes() continue if parsed_length < 3: #", "3: # Abort if not a null byte if c[0]:", "@check_connection def oven_stop(self): self.conn.write(b't') @check_connection def oven_status(self): self.conn.write(b'r') @check_connection def", "= message[0] def check_connection(fun): def inner(self, *args, **kwargs): if self.state", "1 STATE_ACTIVE = 2 STATE_READY = 3 STATE_BOOT = 4", "__init__(self, message): self.status = message[0] def check_connection(fun): def inner(self, *args,", "timeout=0.05) # empty buffer while len(self.conn.read(1)) > 0: pass self.state", "MSG_CONFIG: 9, MSG_STATUS: 5} STATE_START = 1 STATE_ACTIVE = 2", "disconnected if (self.state == 'disconnected'): sleep(0.1) continue try: c =", "3 STATE_BOOT = 4 STATE_INIT = 5 STATE_DISCONNECTED = 127", "connect(self, port): try: self.conn = serial.Serial(port, 9600, timeout=0.05) # empty", "3: parsed_length += 1 if c[0] == 0: continue mtype", "struct from time import sleep import queue import threading import", "self.t2, self.countdown, self.part, self.cycle, self.state, self.dg, self.dt, self.time, self.goal, )", "import sleep import queue import threading import serial from serial", "ctime, temp): self.conn.write(b'c'+struct.pack('=LB', ctime, temp)) @check_connection def oven_start(self): self.conn.write(b's') @check_connection", "{MSG_STATUS: queue.Queue(), MSG_CONFIG: queue.Queue(), MSG_RUN_STATUS: queue.Queue(), } def connect(self, port):", "parsed_length += 1 continue # in any other case this", "def oven_stop(self): self.conn.write(b't') @check_connection def oven_status(self): self.conn.write(b'r') @check_connection def oven_query_config(self):", "'Duty cycle (/30)', 'Heating', 'Cycle', 'Total time', 'Goal temp') MSG_RUN_STATUS", "http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e: self.disconnect() 
finally: self.start_message = 0", "case this is a data byte parsed_length += 1 buffer", "run(self): self.running = 1 parsed_length = 0 mtype = 0", "self.running = 1 parsed_length = 0 mtype = 0 msg_length", "'Goal temp') MSG_RUN_STATUS = 1 MSG_CONFIG = 2 MSG_STATUS =", "Abort if not a null byte if c[0]: parsed_length =", "__slots__ = ('status',) def __init__(self, message): self.status = message[0] def", "__str__(self): return \"\\t\".join( map(str, (self.countdown, self.t1, self.t2, self.dg, self.dt, self.part,", "5} STATE_START = 1 STATE_ACTIVE = 2 STATE_READY = 3", "= self.conn.read(1) except SerialException: self.disconnect() continue # workaround for bug", "if parsed_length == 3: parsed_length += 1 if c[0] ==", "self.t2, self.dg, self.dt, self.part, \"On\" if self.state else \"Off\", self.state,", "0: pass self.state = 'connected' sleep(0.01) self.oven_query_config() sleep(0.2) self.oven_status() except", "3 MSG_LENGTHS = {MSG_RUN_STATUS: 20, MSG_CONFIG: 9, MSG_STATUS: 5} STATE_START", "= 30 class RunStatus: __slots__ = ('countdown', 't1', 't2', 'dg',", "time import sleep import queue import threading import serial from", "self.time, self.goal, ) = struct.unpack('=BBLBB?bbLB', message) def __str__(self): return \"\\t\".join(", "self.goal, ) )) class OvenConfig: __slots__ = ('temp', 'time') def", "c[0] == 0: continue mtype = c[0] msg_length = MSG_LENGTHS[mtype]", "if c[0]: parsed_length = 0 continue # otherwise increment parsed", "STATE_START = 1 STATE_ACTIVE = 2 STATE_READY = 3 STATE_BOOT", "import sys import io from collections import defaultdict import struct", "= 1 MSG_CONFIG = 2 MSG_STATUS = 3 MSG_LENGTHS =", "1 parsed_length = 0 mtype = 0 msg_length = 0", "9, MSG_STATUS: 5} STATE_START = 1 STATE_ACTIVE = 2 STATE_READY", "**kwargs): if self.state == \"connected\": try: fun(self, *args, **kwargs) except", "\"\"\" Client class for hotbox serial connection \"\"\" parsers =", "2 MSG_STATUS = 3 MSG_LENGTHS = {MSG_RUN_STATUS: 20, MSG_CONFIG: 9,", "= 2 STATE_READY 
= 3 STATE_BOOT = 4 STATE_INIT =", "mtype = c[0] msg_length = MSG_LENGTHS[mtype] buffer = bytes() continue", "not a null byte if c[0]: parsed_length = 0 continue", "= ('status',) def __init__(self, message): self.status = message[0] def check_connection(fun):", "http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e: self.disconnect() continue # wait for", "# http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e: self.disconnect() return inner class", "TypeError as e: self.disconnect() continue # wait for message if", "def __init__(self, message): (self.t1, self.t2, self.countdown, self.part, self.cycle, self.state, self.dg,", "1 if c[0] == 0: continue mtype = c[0] msg_length", "super().__init__() self.state = 'disconnected' self.msg_queue = {MSG_STATUS: queue.Queue(), MSG_CONFIG: queue.Queue(),", "= 2 MSG_STATUS = 3 MSG_LENGTHS = {MSG_RUN_STATUS: 20, MSG_CONFIG:", "queue.Queue(), MSG_CONFIG: queue.Queue(), MSG_RUN_STATUS: queue.Queue(), } def connect(self, port): try:", "pyserial # http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e: self.disconnect() return inner", "defaultdict import struct from time import sleep import queue import", "if self.state == \"connected\": try: fun(self, *args, **kwargs) except SerialException:", "= struct.unpack('=LB', message) class OvenStatus: __slots__ = ('status',) def __init__(self,", "= serial.Serial(port, 9600, timeout=0.05) # empty buffer while len(self.conn.read(1)) >", "struct.unpack('=BBLBB?bbLB', message) def __str__(self): return \"\\t\".join( map(str, (self.countdown, self.t1, self.t2,", "0 msg_length = 0 while self.running: # Don't do anything", "= c[0] msg_length = MSG_LENGTHS[mtype] buffer = bytes() continue if", "if parsed_length == msg_length: data = self.parsers[mtype](buffer) self.msg_queue[mtype].put(data) parsed_length =", "= 3 STATE_BOOT = 4 STATE_INIT = 5 STATE_DISCONNECTED =", "threading import serial from serial import SerialException RUN_LABELS = ('Time", "self.dt, 
self.time, self.goal, ) = struct.unpack('=BBLBB?bbLB', message) def __str__(self): return", "class RunStatus: __slots__ = ('countdown', 't1', 't2', 'dg', 'dt', 'part',", "STATE_INIT = 5 STATE_DISCONNECTED = 127 # can't connect to", "can't connect to serial HB_CYCLE = 30 class RunStatus: __slots__", "self.msg_queue[mtype].put(data) parsed_length = 0 mtype = 0 msg_length = 0", "'state', 'cycle', 'time', 'goal') def __init__(self, message): (self.t1, self.t2, self.countdown,", "__init__(self, message): (self.t1, self.t2, self.countdown, self.part, self.cycle, self.state, self.dg, self.dt,", "self.disconnect() continue # workaround for bug in pyserial # http://sourceforge.net/p/pyserial/patches/37/", "mtype = 0 msg_length = 0 while self.running: # Don't", "self.goal, ) = struct.unpack('=BBLBB?bbLB', message) def __str__(self): return \"\\t\".join( map(str,", "'Temp Change', 'Duty cycle (/30)', 'Heating', 'Cycle', 'Total time', 'Goal", "is a data byte parsed_length += 1 buffer += c", "class OvenStatus: __slots__ = ('status',) def __init__(self, message): self.status =", "serial import SerialException RUN_LABELS = ('Time left', 'Temp 1', 'Temp", "Don't do anything if disconnected if (self.state == 'disconnected'): sleep(0.1)", "self.conn.read(1) except SerialException: self.disconnect() continue # workaround for bug in", "continue # workaround for bug in pyserial # http://sourceforge.net/p/pyserial/patches/37/ except", "MSG_STATUS = 3 MSG_LENGTHS = {MSG_RUN_STATUS: 20, MSG_CONFIG: 9, MSG_STATUS:", "# in any other case this is a data byte", "data = self.parsers[mtype](buffer) self.msg_queue[mtype].put(data) parsed_length = 0 mtype = 0", "= 1 parsed_length = 0 mtype = 0 msg_length =", "parsed_length = 0 mtype = 0 msg_length = 0 @check_connection", "if not a null byte if c[0]: parsed_length = 0", "except TypeError as e: self.disconnect() continue # wait for message", "continue # wait for message if not c: continue #", "= 3 MSG_LENGTHS = {MSG_RUN_STATUS: 20, MSG_CONFIG: 9, 
MSG_STATUS: 5}", "import queue import threading import serial from serial import SerialException", "self.state == \"connected\": try: fun(self, *args, **kwargs) except SerialException: self.disconnect()", "def check_connection(fun): def inner(self, *args, **kwargs): if self.state == \"connected\":", "(self.state == 'disconnected'): sleep(0.1) continue try: c = self.conn.read(1) except", "parsed length parsed_length += 1 continue # in any other", "'part', 'state', 'cycle', 'time', 'goal') def __init__(self, message): (self.t1, self.t2,", "inner class Client(threading.Thread): \"\"\" Client class for hotbox serial connection", "{MSG_RUN_STATUS: 20, MSG_CONFIG: 9, MSG_STATUS: 5} STATE_START = 1 STATE_ACTIVE", "STATE_BOOT = 4 STATE_INIT = 5 STATE_DISCONNECTED = 127 #", "message): (self.time, self.temp) = struct.unpack('=LB', message) class OvenStatus: __slots__ =", "parsed_length = 0 continue # otherwise increment parsed length parsed_length", "@check_connection def oven_configure(self, ctime, temp): self.conn.write(b'c'+struct.pack('=LB', ctime, temp)) @check_connection def", "OvenStatus, MSG_RUN_STATUS: RunStatus, MSG_CONFIG: OvenConfig, } def __init__(self): super().__init__() self.state", ") )) class OvenConfig: __slots__ = ('temp', 'time') def __init__(self,", "self.conn.write(b'c'+struct.pack('=LB', ctime, temp)) @check_connection def oven_start(self): self.conn.write(b's') @check_connection def oven_stop(self):", "null byte if c[0]: parsed_length = 0 continue # otherwise", "SerialException RUN_LABELS = ('Time left', 'Temp 1', 'Temp 2', 'Off", "connection \"\"\" parsers = { MSG_STATUS: OvenStatus, MSG_RUN_STATUS: RunStatus, MSG_CONFIG:", "bug in pyserial # http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e: self.disconnect()", "self.conn.write(b'r') @check_connection def oven_query_config(self): self.conn.write(b'q') def disconnect(self): self.state = 'disconnected'", "'Total time', 'Goal temp') MSG_RUN_STATUS = 1 MSG_CONFIG = 2", 
"self.start_message = 0 def run(self): self.running = 1 parsed_length =", "= ('temp', 'time') def __init__(self, message): (self.time, self.temp) = struct.unpack('=LB',", "e: self.disconnect() return inner class Client(threading.Thread): \"\"\" Client class for", "MSG_RUN_STATUS: RunStatus, MSG_CONFIG: OvenConfig, } def __init__(self): super().__init__() self.state =", "1 MSG_CONFIG = 2 MSG_STATUS = 3 MSG_LENGTHS = {MSG_RUN_STATUS:", "'cycle', 'time', 'goal') def __init__(self, message): (self.t1, self.t2, self.countdown, self.part,", "self.dt, self.part, \"On\" if self.state else \"Off\", self.state, self.cycle, self.time,", "== 0: continue mtype = c[0] msg_length = MSG_LENGTHS[mtype] buffer", "def __init__(self, message): (self.time, self.temp) = struct.unpack('=LB', message) class OvenStatus:", "'Temp 2', 'Off Goal', 'Temp Change', 'Duty cycle (/30)', 'Heating',", "self.conn = serial.Serial(port, 9600, timeout=0.05) # empty buffer while len(self.conn.read(1))", "except TypeError as e: self.disconnect() return inner class Client(threading.Thread): \"\"\"", "@check_connection def oven_query_config(self): self.conn.write(b'q') def disconnect(self): self.state = 'disconnected' self.msg_queue[MSG_STATUS].put(OvenStatus((STATE_DISCONNECTED,)))", "serial HB_CYCLE = 30 class RunStatus: __slots__ = ('countdown', 't1',", "RunStatus: __slots__ = ('countdown', 't1', 't2', 'dg', 'dt', 'part', 'state',", "self.cycle, self.state, self.dg, self.dt, self.time, self.goal, ) = struct.unpack('=BBLBB?bbLB', message)", "OvenConfig, } def __init__(self): super().__init__() self.state = 'disconnected' self.msg_queue =", "self.disconnect() finally: self.start_message = 0 def run(self): self.running = 1", "while len(self.conn.read(1)) > 0: pass self.state = 'connected' sleep(0.01) self.oven_query_config()", "sleep(0.2) self.oven_status() except SerialException: self.disconnect() # workaround for bug in", "port): try: self.conn = serial.Serial(port, 9600, timeout=0.05) # empty buffer", 
"continue try: c = self.conn.read(1) except SerialException: self.disconnect() continue #", "= self.parsers[mtype](buffer) self.msg_queue[mtype].put(data) parsed_length = 0 mtype = 0 msg_length", "oven_stop(self): self.conn.write(b't') @check_connection def oven_status(self): self.conn.write(b'r') @check_connection def oven_query_config(self): self.conn.write(b'q')", "def oven_start(self): self.conn.write(b's') @check_connection def oven_stop(self): self.conn.write(b't') @check_connection def oven_status(self):", "oven_configure(self, ctime, temp): self.conn.write(b'c'+struct.pack('=LB', ctime, temp)) @check_connection def oven_start(self): self.conn.write(b's')", "message): self.status = message[0] def check_connection(fun): def inner(self, *args, **kwargs):", "'Heating', 'Cycle', 'Total time', 'Goal temp') MSG_RUN_STATUS = 1 MSG_CONFIG", "for bug in pyserial # http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e:", "'Temp 1', 'Temp 2', 'Off Goal', 'Temp Change', 'Duty cycle", "def inner(self, *args, **kwargs): if self.state == \"connected\": try: fun(self,", "except TypeError as e: self.disconnect() finally: self.start_message = 0 def", "127 # can't connect to serial HB_CYCLE = 30 class", "# empty buffer while len(self.conn.read(1)) > 0: pass self.state =", "'dt', 'part', 'state', 'cycle', 'time', 'goal') def __init__(self, message): (self.t1,", "except SerialException: self.disconnect() # workaround for bug in pyserial #", "} def connect(self, port): try: self.conn = serial.Serial(port, 9600, timeout=0.05)", "length parsed_length += 1 continue # in any other case", "@check_connection def oven_start(self): self.conn.write(b's') @check_connection def oven_stop(self): self.conn.write(b't') @check_connection def", "for hotbox serial connection \"\"\" parsers = { MSG_STATUS: OvenStatus,", "= 127 # can't connect to serial HB_CYCLE = 30", "wait for message if not c: continue # this is", "('Time left', 'Temp 1', 'Temp 2', 'Off Goal', 'Temp Change',", 
"self.msg_queue = {MSG_STATUS: queue.Queue(), MSG_CONFIG: queue.Queue(), MSG_RUN_STATUS: queue.Queue(), } def", "1 buffer += c if parsed_length == msg_length: data =", "def oven_status(self): self.conn.write(b'r') @check_connection def oven_query_config(self): self.conn.write(b'q') def disconnect(self): self.state", "SerialException: self.disconnect() continue # workaround for bug in pyserial #", "other case this is a data byte parsed_length += 1", "parsers = { MSG_STATUS: OvenStatus, MSG_RUN_STATUS: RunStatus, MSG_CONFIG: OvenConfig, }", "'t2', 'dg', 'dt', 'part', 'state', 'cycle', 'time', 'goal') def __init__(self,", "self.temp) = struct.unpack('=LB', message) class OvenStatus: __slots__ = ('status',) def", "= 0 msg_length = 0 while self.running: # Don't do", "try: c = self.conn.read(1) except SerialException: self.disconnect() continue # workaround", "= 0 continue # otherwise increment parsed length parsed_length +=", "'disconnected' self.msg_queue = {MSG_STATUS: queue.Queue(), MSG_CONFIG: queue.Queue(), MSG_RUN_STATUS: queue.Queue(), }", "self.state, self.cycle, self.time, self.goal, ) )) class OvenConfig: __slots__ =", "def connect(self, port): try: self.conn = serial.Serial(port, 9600, timeout=0.05) #", "to serial HB_CYCLE = 30 class RunStatus: __slots__ = ('countdown',", "pyserial # http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e: self.disconnect() continue #", "RunStatus, MSG_CONFIG: OvenConfig, } def __init__(self): super().__init__() self.state = 'disconnected'", "def __init__(self): super().__init__() self.state = 'disconnected' self.msg_queue = {MSG_STATUS: queue.Queue(),", "== 3: parsed_length += 1 if c[0] == 0: continue", "= 4 STATE_INIT = 5 STATE_DISCONNECTED = 127 # can't", "__init__(self, message): (self.time, self.temp) = struct.unpack('=LB', message) class OvenStatus: __slots__", "continue mtype = c[0] msg_length = MSG_LENGTHS[mtype] buffer = bytes()", "<reponame>odontomachus/hotbox<filename>client/client.py<gh_stars>0 import 
sys import io from collections import defaultdict import", "+= c if parsed_length == msg_length: data = self.parsers[mtype](buffer) self.msg_queue[mtype].put(data)", "(self.t1, self.t2, self.countdown, self.part, self.cycle, self.state, self.dg, self.dt, self.time, self.goal,", "= 0 mtype = 0 msg_length = 0 while self.running:", "inner(self, *args, **kwargs): if self.state == \"connected\": try: fun(self, *args,", "bytes() continue if parsed_length < 3: # Abort if not", "msg_length = 0 while self.running: # Don't do anything if", "while self.running: # Don't do anything if disconnected if (self.state", "MSG_STATUS: OvenStatus, MSG_RUN_STATUS: RunStatus, MSG_CONFIG: OvenConfig, } def __init__(self): super().__init__()", "self.cycle, self.time, self.goal, ) )) class OvenConfig: __slots__ = ('temp',", "\"Off\", self.state, self.cycle, self.time, self.goal, ) )) class OvenConfig: __slots__", "msg_length = MSG_LENGTHS[mtype] buffer = bytes() continue if parsed_length <", "# http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e: self.disconnect() finally: self.start_message =", "type byte if parsed_length == 3: parsed_length += 1 if", "20, MSG_CONFIG: 9, MSG_STATUS: 5} STATE_START = 1 STATE_ACTIVE =", "struct.unpack('=LB', message) class OvenStatus: __slots__ = ('status',) def __init__(self, message):", "c if parsed_length == msg_length: data = self.parsers[mtype](buffer) self.msg_queue[mtype].put(data) parsed_length", "= {MSG_RUN_STATUS: 20, MSG_CONFIG: 9, MSG_STATUS: 5} STATE_START = 1", "queue import threading import serial from serial import SerialException RUN_LABELS", "parsed_length < 3: # Abort if not a null byte", "message if not c: continue # this is the message", "parsed_length == msg_length: data = self.parsers[mtype](buffer) self.msg_queue[mtype].put(data) parsed_length = 0", "pass self.state = 'connected' sleep(0.01) self.oven_query_config() sleep(0.2) self.oven_status() except SerialException:", "# this is the message type byte if 
parsed_length ==", "'Cycle', 'Total time', 'Goal temp') MSG_RUN_STATUS = 1 MSG_CONFIG =", "hotbox serial connection \"\"\" parsers = { MSG_STATUS: OvenStatus, MSG_RUN_STATUS:", "collections import defaultdict import struct from time import sleep import", "sleep(0.01) self.oven_query_config() sleep(0.2) self.oven_status() except SerialException: self.disconnect() # workaround for", "self.status = message[0] def check_connection(fun): def inner(self, *args, **kwargs): if", "> 0: pass self.state = 'connected' sleep(0.01) self.oven_query_config() sleep(0.2) self.oven_status()", "import serial from serial import SerialException RUN_LABELS = ('Time left',", "0 continue # otherwise increment parsed length parsed_length += 1", "left', 'Temp 1', 'Temp 2', 'Off Goal', 'Temp Change', 'Duty", "self.state = 'connected' sleep(0.01) self.oven_query_config() sleep(0.2) self.oven_status() except SerialException: self.disconnect()", "0 while self.running: # Don't do anything if disconnected if", "import defaultdict import struct from time import sleep import queue", "1', 'Temp 2', 'Off Goal', 'Temp Change', 'Duty cycle (/30)',", "map(str, (self.countdown, self.t1, self.t2, self.dg, self.dt, self.part, \"On\" if self.state", "('status',) def __init__(self, message): self.status = message[0] def check_connection(fun): def", "self.oven_status() except SerialException: self.disconnect() # workaround for bug in pyserial", "http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e: self.disconnect() return inner class Client(threading.Thread):", "a null byte if c[0]: parsed_length = 0 continue #", "\"\\t\".join( map(str, (self.countdown, self.t1, self.t2, self.dg, self.dt, self.part, \"On\" if", "parsed_length += 1 buffer += c if parsed_length == msg_length:", "parsed_length = 0 mtype = 0 msg_length = 0 while", "Change', 'Duty cycle (/30)', 'Heating', 'Cycle', 'Total time', 'Goal temp')", "MSG_RUN_STATUS = 1 MSG_CONFIG = 2 MSG_STATUS = 3 MSG_LENGTHS", "return inner class 
Client(threading.Thread): \"\"\" Client class for hotbox serial", "TypeError as e: self.disconnect() return inner class Client(threading.Thread): \"\"\" Client", "== msg_length: data = self.parsers[mtype](buffer) self.msg_queue[mtype].put(data) parsed_length = 0 mtype", "cycle (/30)', 'Heating', 'Cycle', 'Total time', 'Goal temp') MSG_RUN_STATUS =", "= 0 while self.running: # Don't do anything if disconnected", "byte if parsed_length == 3: parsed_length += 1 if c[0]", "empty buffer while len(self.conn.read(1)) > 0: pass self.state = 'connected'", "HB_CYCLE = 30 class RunStatus: __slots__ = ('countdown', 't1', 't2',", "otherwise increment parsed length parsed_length += 1 continue # in", "time', 'Goal temp') MSG_RUN_STATUS = 1 MSG_CONFIG = 2 MSG_STATUS", "in pyserial # http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e: self.disconnect() continue", "increment parsed length parsed_length += 1 continue # in any", "if self.state else \"Off\", self.state, self.cycle, self.time, self.goal, ) ))", "# http://sourceforge.net/p/pyserial/patches/37/ except TypeError as e: self.disconnect() continue # wait", "self.time, self.goal, ) )) class OvenConfig: __slots__ = ('temp', 'time')", "< 3: # Abort if not a null byte if", "(self.time, self.temp) = struct.unpack('=LB', message) class OvenStatus: __slots__ = ('status',)", "self.disconnect() return inner class Client(threading.Thread): \"\"\" Client class for hotbox", "**kwargs) except SerialException: self.disconnect() # workaround for bug in pyserial", "self.disconnect() # workaround for bug in pyserial # http://sourceforge.net/p/pyserial/patches/37/ except", "self.parsers[mtype](buffer) self.msg_queue[mtype].put(data) parsed_length = 0 mtype = 0 msg_length =", "def run(self): self.running = 1 parsed_length = 0 mtype =", "io from collections import defaultdict import struct from time import", "} def __init__(self): super().__init__() self.state = 'disconnected' self.msg_queue = {MSG_STATUS:", "= 1 STATE_ACTIVE = 
2 STATE_READY = 3 STATE_BOOT =", "this is a data byte parsed_length += 1 buffer +=", "30 class RunStatus: __slots__ = ('countdown', 't1', 't2', 'dg', 'dt',", "buffer = bytes() continue if parsed_length < 3: # Abort", "queue.Queue(), MSG_RUN_STATUS: queue.Queue(), } def connect(self, port): try: self.conn =", "try: self.conn = serial.Serial(port, 9600, timeout=0.05) # empty buffer while", "connect to serial HB_CYCLE = 30 class RunStatus: __slots__ =", "('temp', 'time') def __init__(self, message): (self.time, self.temp) = struct.unpack('=LB', message)", "# Abort if not a null byte if c[0]: parsed_length", "if c[0] == 0: continue mtype = c[0] msg_length =", "# otherwise increment parsed length parsed_length += 1 continue #", "workaround for bug in pyserial # http://sourceforge.net/p/pyserial/patches/37/ except TypeError as", "= { MSG_STATUS: OvenStatus, MSG_RUN_STATUS: RunStatus, MSG_CONFIG: OvenConfig, } def", "= 0 mtype = 0 msg_length = 0 @check_connection def", "is the message type byte if parsed_length == 3: parsed_length", "if parsed_length < 3: # Abort if not a null", "MSG_STATUS: 5} STATE_START = 1 STATE_ACTIVE = 2 STATE_READY =", "OvenStatus: __slots__ = ('status',) def __init__(self, message): self.status = message[0]", "+= 1 if c[0] == 0: continue mtype = c[0]", "message type byte if parsed_length == 3: parsed_length += 1", "from serial import SerialException RUN_LABELS = ('Time left', 'Temp 1',", "# wait for message if not c: continue # this", "class OvenConfig: __slots__ = ('temp', 'time') def __init__(self, message): (self.time,", "message) def __str__(self): return \"\\t\".join( map(str, (self.countdown, self.t1, self.t2, self.dg,", "if disconnected if (self.state == 'disconnected'): sleep(0.1) continue try: c", "'Off Goal', 'Temp Change', 'Duty cycle (/30)', 'Heating', 'Cycle', 'Total", "oven_start(self): self.conn.write(b's') @check_connection def oven_stop(self): self.conn.write(b't') @check_connection def oven_status(self): self.conn.write(b'r')", 
"= 'connected' sleep(0.01) self.oven_query_config() sleep(0.2) self.oven_status() except SerialException: self.disconnect() #", "RUN_LABELS = ('Time left', 'Temp 1', 'Temp 2', 'Off Goal',", "e: self.disconnect() finally: self.start_message = 0 def run(self): self.running =", "__slots__ = ('countdown', 't1', 't2', 'dg', 'dt', 'part', 'state', 'cycle',", "continue # in any other case this is a data", "sys import io from collections import defaultdict import struct from", "Client class for hotbox serial connection \"\"\" parsers = {", "the message type byte if parsed_length == 3: parsed_length +=", "@check_connection def oven_status(self): self.conn.write(b'r') @check_connection def oven_query_config(self): self.conn.write(b'q') def disconnect(self):", "= 0 def run(self): self.running = 1 parsed_length = 0", "= 'disconnected' self.msg_queue = {MSG_STATUS: queue.Queue(), MSG_CONFIG: queue.Queue(), MSG_RUN_STATUS: queue.Queue(),", "queue.Queue(), } def connect(self, port): try: self.conn = serial.Serial(port, 9600,", "e: self.disconnect() continue # wait for message if not c:", "self.part, \"On\" if self.state else \"Off\", self.state, self.cycle, self.time, self.goal,", "continue # otherwise increment parsed length parsed_length += 1 continue" ]
[ "test_framework.blocktools import create_block, create_coinbase from test_framework.mininode import (CBlockHeader, network_thread_start, P2PInterface,", "from test_framework.mininode import (CBlockHeader, network_thread_start, P2PInterface, msg_block, msg_headers) from test_framework.test_framework", "2): block = create_block(tip, create_coinbase(height), time) block.solve() blocks.append(block) tip =", "self.num_nodes = 1 # Setting minimumchainwork makes sure we test", "test IBD as well as post-IBD self.extra_args = [ [\"-minimumchainwork={:#x}\".format(202", "create_block, create_coinbase from test_framework.mininode import (CBlockHeader, network_thread_start, P2PInterface, msg_block, msg_headers)", "Distributed under the MIT software license, see the accompanying #", "download (IBD; this occurs once it has passed minimumchainwork) and", "python3 # Copyright (c) 2018 The Bitcoin developers # Distributed", "msg_headers) from test_framework.test_framework import BitcoinTestFramework from test_framework.util import wait_until, p2p_port", "= [CBlockHeader(block)] self.send_message(msg) def send_block(self, block): self.send_message(msg_block(block)) class SyncChainTest(BitcoinTestFramework): def", "makes sure we test IBD as well as post-IBD self.extra_args", "self.extra_args = [ [\"-minimumchainwork={:#x}\".format(202 + 2 * NUM_IBD_BLOCKS)]] def run_test(self):", "[] for i in range(NUM_IBD_BLOCKS * 2): block = create_block(tip,", "need to be sent in-order for b in blocks: node0conn.send_header(b)", "run_test(self): node0conn = BaseNode() node0conn.peer_connect('127.0.0.1', p2p_port(0)) network_thread_start() node0conn.wait_for_verack() node0 =", "tip = block.sha256 height += 1 time += 1 #", "class BaseNode(P2PInterface): def send_header(self, block): msg = msg_headers() msg.headers =", "sent in-order for b in blocks: node0conn.send_header(b) # Send blocks", "BaseNode() node0conn.peer_connect('127.0.0.1', p2p_port(0)) network_thread_start() 
node0conn.wait_for_verack() node0 = self.nodes[0] tip =", "def set_test_params(self): self.num_nodes = 1 # Setting minimumchainwork makes sure", "for i in range(NUM_IBD_BLOCKS * 2): block = create_block(tip, create_coinbase(height),", "for b in blocks: node0conn.send_header(b) # Send blocks in some", "node should eventually, completely sync without getting stuck def node_synced():", "msg_headers() msg.headers = [CBlockHeader(block)] self.send_message(msg) def send_block(self, block): self.send_message(msg_block(block)) class", "from test_framework.blocktools import create_block, create_coinbase from test_framework.mininode import (CBlockHeader, network_thread_start,", "Setting minimumchainwork makes sure we test IBD as well as", "developers # Distributed under the MIT software license, see the", "BitcoinTestFramework from test_framework.util import wait_until, p2p_port NUM_IBD_BLOCKS = 50 class", "minimumchainwork) and continues to sync without seizing. \"\"\" import random", "order) blocks exits initial block download (IBD; this occurs once", "to be sent in-order for b in blocks: node0conn.send_header(b) #", "import create_block, create_coinbase from test_framework.mininode import (CBlockHeader, network_thread_start, P2PInterface, msg_block,", "license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. \"\"\"", "# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
\"\"\" Test that a node", "in-order for b in blocks: node0conn.send_header(b) # Send blocks in", "completely sync without getting stuck def node_synced(): return node0.getbestblockhash() ==", "network_thread_start, P2PInterface, msg_block, msg_headers) from test_framework.test_framework import BitcoinTestFramework from test_framework.util", "node receiving many (potentially out of order) blocks exits initial", "[CBlockHeader(block)] self.send_message(msg) def send_block(self, block): self.send_message(msg_block(block)) class SyncChainTest(BitcoinTestFramework): def set_test_params(self):", "once it has passed minimumchainwork) and continues to sync without", "this occurs once it has passed minimumchainwork) and continues to", "+ 1 time = node0.getblock(node0.getbestblockhash())['time'] + 1 blocks = []", "The Bitcoin developers # Distributed under the MIT software license,", "# Distributed under the MIT software license, see the accompanying", "see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. \"\"\" Test", "1 blocks = [] for i in range(NUM_IBD_BLOCKS * 2):", "file COPYING or http://www.opensource.org/licenses/mit-license.php. 
\"\"\" Test that a node receiving", "occurs once it has passed minimumchainwork) and continues to sync", "test_framework.test_framework import BitcoinTestFramework from test_framework.util import wait_until, p2p_port NUM_IBD_BLOCKS =", "+= 1 time += 1 # Headers need to be", "def run_test(self): node0conn = BaseNode() node0conn.peer_connect('127.0.0.1', p2p_port(0)) network_thread_start() node0conn.wait_for_verack() node0", "= BaseNode() node0conn.peer_connect('127.0.0.1', p2p_port(0)) network_thread_start() node0conn.wait_for_verack() node0 = self.nodes[0] tip", "= self.nodes[0] tip = int(node0.getbestblockhash(), 16) height = node0.getblockcount() +", "= block.sha256 height += 1 time += 1 # Headers", "random order for b in random.sample(blocks, len(blocks)): node0conn.send_block(b) # The", "blocks exits initial block download (IBD; this occurs once it", "send_block(self, block): self.send_message(msg_block(block)) class SyncChainTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1", "* NUM_IBD_BLOCKS)]] def run_test(self): node0conn = BaseNode() node0conn.peer_connect('127.0.0.1', p2p_port(0)) network_thread_start()", "node0 = self.nodes[0] tip = int(node0.getbestblockhash(), 16) height = node0.getblockcount()", "(c) 2018 The Bitcoin developers # Distributed under the MIT", "and continues to sync without seizing. 
\"\"\" import random from", "def send_block(self, block): self.send_message(msg_block(block)) class SyncChainTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes =", "[\"-minimumchainwork={:#x}\".format(202 + 2 * NUM_IBD_BLOCKS)]] def run_test(self): node0conn = BaseNode()", "= [] for i in range(NUM_IBD_BLOCKS * 2): block =", "block): self.send_message(msg_block(block)) class SyncChainTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 #", "len(blocks)): node0conn.send_block(b) # The node should eventually, completely sync without", "The node should eventually, completely sync without getting stuck def", "from test_framework.test_framework import BitcoinTestFramework from test_framework.util import wait_until, p2p_port NUM_IBD_BLOCKS", "continues to sync without seizing. \"\"\" import random from test_framework.blocktools", "P2PInterface, msg_block, msg_headers) from test_framework.test_framework import BitcoinTestFramework from test_framework.util import", "p2p_port NUM_IBD_BLOCKS = 50 class BaseNode(P2PInterface): def send_header(self, block): msg", "msg = msg_headers() msg.headers = [CBlockHeader(block)] self.send_message(msg) def send_block(self, block):", "1 # Headers need to be sent in-order for b", "node0conn.send_header(b) # Send blocks in some random order for b", "self.send_message(msg_block(block)) class SyncChainTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 # Setting", "without getting stuck def node_synced(): return node0.getbestblockhash() == blocks[-1].hash wait_until(node_synced)", "return node0.getbestblockhash() == blocks[-1].hash wait_until(node_synced) if __name__ == '__main__': SyncChainTest().main()", "that a node receiving many (potentially out of order) blocks", "create_coinbase from test_framework.mininode import (CBlockHeader, network_thread_start, P2PInterface, msg_block, msg_headers) from", "of order) blocks exits initial block download (IBD; this occurs", "out of order) 
blocks exits initial block download (IBD; this", "b in random.sample(blocks, len(blocks)): node0conn.send_block(b) # The node should eventually,", "COPYING or http://www.opensource.org/licenses/mit-license.php. \"\"\" Test that a node receiving many", "from test_framework.util import wait_until, p2p_port NUM_IBD_BLOCKS = 50 class BaseNode(P2PInterface):", "= node0.getblock(node0.getbestblockhash())['time'] + 1 blocks = [] for i in", "import random from test_framework.blocktools import create_block, create_coinbase from test_framework.mininode import", "self.send_message(msg) def send_block(self, block): self.send_message(msg_block(block)) class SyncChainTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes", "well as post-IBD self.extra_args = [ [\"-minimumchainwork={:#x}\".format(202 + 2 *", "test_framework.mininode import (CBlockHeader, network_thread_start, P2PInterface, msg_block, msg_headers) from test_framework.test_framework import", "node0conn.peer_connect('127.0.0.1', p2p_port(0)) network_thread_start() node0conn.wait_for_verack() node0 = self.nodes[0] tip = int(node0.getbestblockhash(),", "= int(node0.getbestblockhash(), 16) height = node0.getblockcount() + 1 time =", "stuck def node_synced(): return node0.getbestblockhash() == blocks[-1].hash wait_until(node_synced) if __name__", "MIT software license, see the accompanying # file COPYING or", "blocks.append(block) tip = block.sha256 height += 1 time += 1", "class SyncChainTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 # Setting minimumchainwork", "for b in random.sample(blocks, len(blocks)): node0conn.send_block(b) # The node should", "tip = int(node0.getbestblockhash(), 16) height = node0.getblockcount() + 1 time", "node0.getblockcount() + 1 time = node0.getblock(node0.getbestblockhash())['time'] + 1 blocks =", "Test that a node receiving many (potentially out of order)", "time) block.solve() blocks.append(block) tip = block.sha256 height += 1 time", "should 
eventually, completely sync without getting stuck def node_synced(): return", "blocks = [] for i in range(NUM_IBD_BLOCKS * 2): block", "a node receiving many (potentially out of order) blocks exits", "node0.getblock(node0.getbestblockhash())['time'] + 1 blocks = [] for i in range(NUM_IBD_BLOCKS", "be sent in-order for b in blocks: node0conn.send_header(b) # Send", "50 class BaseNode(P2PInterface): def send_header(self, block): msg = msg_headers() msg.headers", "send_header(self, block): msg = msg_headers() msg.headers = [CBlockHeader(block)] self.send_message(msg) def", "order for b in random.sample(blocks, len(blocks)): node0conn.send_block(b) # The node", "sure we test IBD as well as post-IBD self.extra_args =", "= create_block(tip, create_coinbase(height), time) block.solve() blocks.append(block) tip = block.sha256 height", "NUM_IBD_BLOCKS)]] def run_test(self): node0conn = BaseNode() node0conn.peer_connect('127.0.0.1', p2p_port(0)) network_thread_start() node0conn.wait_for_verack()", "under the MIT software license, see the accompanying # file", "SyncChainTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 # Setting minimumchainwork makes", "set_test_params(self): self.num_nodes = 1 # Setting minimumchainwork makes sure we", "http://www.opensource.org/licenses/mit-license.php. \"\"\" Test that a node receiving many (potentially out", "seizing. 
\"\"\" import random from test_framework.blocktools import create_block, create_coinbase from", "minimumchainwork makes sure we test IBD as well as post-IBD", "IBD as well as post-IBD self.extra_args = [ [\"-minimumchainwork={:#x}\".format(202 +", "p2p_port(0)) network_thread_start() node0conn.wait_for_verack() node0 = self.nodes[0] tip = int(node0.getbestblockhash(), 16)", "in blocks: node0conn.send_header(b) # Send blocks in some random order", "block): msg = msg_headers() msg.headers = [CBlockHeader(block)] self.send_message(msg) def send_block(self,", "in range(NUM_IBD_BLOCKS * 2): block = create_block(tip, create_coinbase(height), time) block.solve()", "software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php.", "def node_synced(): return node0.getbestblockhash() == blocks[-1].hash wait_until(node_synced) if __name__ ==", "msg.headers = [CBlockHeader(block)] self.send_message(msg) def send_block(self, block): self.send_message(msg_block(block)) class SyncChainTest(BitcoinTestFramework):", "# Setting minimumchainwork makes sure we test IBD as well", "import BitcoinTestFramework from test_framework.util import wait_until, p2p_port NUM_IBD_BLOCKS = 50", "block.solve() blocks.append(block) tip = block.sha256 height += 1 time +=", "as well as post-IBD self.extra_args = [ [\"-minimumchainwork={:#x}\".format(202 + 2", "[ [\"-minimumchainwork={:#x}\".format(202 + 2 * NUM_IBD_BLOCKS)]] def run_test(self): node0conn =", "wait_until, p2p_port NUM_IBD_BLOCKS = 50 class BaseNode(P2PInterface): def send_header(self, block):", "Headers need to be sent in-order for b in blocks:", "+ 1 blocks = [] for i in range(NUM_IBD_BLOCKS *", "many (potentially out of order) blocks exits initial block download", "(IBD; this occurs once it has passed minimumchainwork) and continues", "= 50 class BaseNode(P2PInterface): def send_header(self, block): msg = msg_headers()", "or http://www.opensource.org/licenses/mit-license.php. 
\"\"\" Test that a node receiving many (potentially", "random from test_framework.blocktools import create_block, create_coinbase from test_framework.mininode import (CBlockHeader,", "1 time = node0.getblock(node0.getbestblockhash())['time'] + 1 blocks = [] for", "sync without getting stuck def node_synced(): return node0.getbestblockhash() == blocks[-1].hash", "+= 1 # Headers need to be sent in-order for", "as post-IBD self.extra_args = [ [\"-minimumchainwork={:#x}\".format(202 + 2 * NUM_IBD_BLOCKS)]]", "exits initial block download (IBD; this occurs once it has", "range(NUM_IBD_BLOCKS * 2): block = create_block(tip, create_coinbase(height), time) block.solve() blocks.append(block)", "random.sample(blocks, len(blocks)): node0conn.send_block(b) # The node should eventually, completely sync", "Bitcoin developers # Distributed under the MIT software license, see", "= [ [\"-minimumchainwork={:#x}\".format(202 + 2 * NUM_IBD_BLOCKS)]] def run_test(self): node0conn", "accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. \"\"\" Test that a", "block download (IBD; this occurs once it has passed minimumchainwork)", "2 * NUM_IBD_BLOCKS)]] def run_test(self): node0conn = BaseNode() node0conn.peer_connect('127.0.0.1', p2p_port(0))", "initial block download (IBD; this occurs once it has passed", "block.sha256 height += 1 time += 1 # Headers need", "# Send blocks in some random order for b in", "import (CBlockHeader, network_thread_start, P2PInterface, msg_block, msg_headers) from test_framework.test_framework import BitcoinTestFramework", "node0conn.wait_for_verack() node0 = self.nodes[0] tip = int(node0.getbestblockhash(), 16) height =", "1 # Setting minimumchainwork makes sure we test IBD as", "self.nodes[0] tip = int(node0.getbestblockhash(), 16) height = node0.getblockcount() + 1", "has passed minimumchainwork) and continues to sync without seizing. 
\"\"\"", "time += 1 # Headers need to be sent in-order", "Copyright (c) 2018 The Bitcoin developers # Distributed under the", "msg_block, msg_headers) from test_framework.test_framework import BitcoinTestFramework from test_framework.util import wait_until,", "(potentially out of order) blocks exits initial block download (IBD;", "some random order for b in random.sample(blocks, len(blocks)): node0conn.send_block(b) #", "getting stuck def node_synced(): return node0.getbestblockhash() == blocks[-1].hash wait_until(node_synced) if", "BaseNode(P2PInterface): def send_header(self, block): msg = msg_headers() msg.headers = [CBlockHeader(block)]", "sync without seizing. \"\"\" import random from test_framework.blocktools import create_block,", "= 1 # Setting minimumchainwork makes sure we test IBD", "= msg_headers() msg.headers = [CBlockHeader(block)] self.send_message(msg) def send_block(self, block): self.send_message(msg_block(block))", "height += 1 time += 1 # Headers need to", "#!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin developers #", "node0conn = BaseNode() node0conn.peer_connect('127.0.0.1', p2p_port(0)) network_thread_start() node0conn.wait_for_verack() node0 = self.nodes[0]", "i in range(NUM_IBD_BLOCKS * 2): block = create_block(tip, create_coinbase(height), time)", "# Copyright (c) 2018 The Bitcoin developers # Distributed under", "time = node0.getblock(node0.getbestblockhash())['time'] + 1 blocks = [] for i", "blocks in some random order for b in random.sample(blocks, len(blocks)):", "# Headers need to be sent in-order for b in", "create_coinbase(height), time) block.solve() blocks.append(block) tip = block.sha256 height += 1", "eventually, completely sync without getting stuck def node_synced(): return node0.getbestblockhash()", "block = create_block(tip, create_coinbase(height), time) block.solve() blocks.append(block) tip = block.sha256", "+ 2 * NUM_IBD_BLOCKS)]] def run_test(self): node0conn = BaseNode() node0conn.peer_connect('127.0.0.1',", 
"= node0.getblockcount() + 1 time = node0.getblock(node0.getbestblockhash())['time'] + 1 blocks", "without seizing. \"\"\" import random from test_framework.blocktools import create_block, create_coinbase", "* 2): block = create_block(tip, create_coinbase(height), time) block.solve() blocks.append(block) tip", "16) height = node0.getblockcount() + 1 time = node0.getblock(node0.getbestblockhash())['time'] +", "Send blocks in some random order for b in random.sample(blocks,", "create_block(tip, create_coinbase(height), time) block.solve() blocks.append(block) tip = block.sha256 height +=", "the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. \"\"\" Test that", "import wait_until, p2p_port NUM_IBD_BLOCKS = 50 class BaseNode(P2PInterface): def send_header(self,", "network_thread_start() node0conn.wait_for_verack() node0 = self.nodes[0] tip = int(node0.getbestblockhash(), 16) height", "it has passed minimumchainwork) and continues to sync without seizing.", "1 time += 1 # Headers need to be sent", "def send_header(self, block): msg = msg_headers() msg.headers = [CBlockHeader(block)] self.send_message(msg)", "b in blocks: node0conn.send_header(b) # Send blocks in some random", "int(node0.getbestblockhash(), 16) height = node0.getblockcount() + 1 time = node0.getblock(node0.getbestblockhash())['time']", "height = node0.getblockcount() + 1 time = node0.getblock(node0.getbestblockhash())['time'] + 1", "blocks: node0conn.send_header(b) # Send blocks in some random order for", "we test IBD as well as post-IBD self.extra_args = [", "\"\"\" Test that a node receiving many (potentially out of", "post-IBD self.extra_args = [ [\"-minimumchainwork={:#x}\".format(202 + 2 * NUM_IBD_BLOCKS)]] def", "# The node should eventually, completely sync without getting stuck", "the MIT software license, see the accompanying # file COPYING", "node_synced(): return node0.getbestblockhash() == blocks[-1].hash wait_until(node_synced) if __name__ == '__main__':", 
"to sync without seizing. \"\"\" import random from test_framework.blocktools import", "in some random order for b in random.sample(blocks, len(blocks)): node0conn.send_block(b)", "node0conn.send_block(b) # The node should eventually, completely sync without getting", "test_framework.util import wait_until, p2p_port NUM_IBD_BLOCKS = 50 class BaseNode(P2PInterface): def", "2018 The Bitcoin developers # Distributed under the MIT software", "\"\"\" import random from test_framework.blocktools import create_block, create_coinbase from test_framework.mininode", "receiving many (potentially out of order) blocks exits initial block", "in random.sample(blocks, len(blocks)): node0conn.send_block(b) # The node should eventually, completely", "passed minimumchainwork) and continues to sync without seizing. \"\"\" import", "(CBlockHeader, network_thread_start, P2PInterface, msg_block, msg_headers) from test_framework.test_framework import BitcoinTestFramework from", "NUM_IBD_BLOCKS = 50 class BaseNode(P2PInterface): def send_header(self, block): msg =" ]
[ "show up directly on the database class Meta: abstract =", "models.ManyToManyField(user_model.User, related_name='post_image_likes') class Comments(TimeStamedModel): author = models.ForeignKey( user_model.User, null =", "= models.ForeignKey( Posts, null = True, on_delete = models.CASCADE, related_name", "True, on_delete = models.CASCADE, related_name = \"post_author\" ) caption =", "models.CASCADE, related_name = \"post_author\" ) caption = models.TextField(blank=True) image =", "inheritance. # An often-used pattern class TimeStamedModel(models.Model): created_at = models.DateTimeField(auto_now_add=True)", "True, on_delete = models.CASCADE, related_name = \"comment_post\" ) contents =", "import models from djangostagram.users import models as user_model # Create", "as user_model # Create your models here. # This class", "image_likes = models.ManyToManyField(user_model.User, related_name='post_image_likes') class Comments(TimeStamedModel): author = models.ForeignKey( user_model.User,", "here. 
# This class is used in other models as", "= models.TextField(blank=True) image = models.ImageField(blank=True) image_likes = models.ManyToManyField(user_model.User, related_name='post_image_likes') class", "# An often-used pattern class TimeStamedModel(models.Model): created_at = models.DateTimeField(auto_now_add=True) updated_at", "author = models.ForeignKey( user_model.User, null = True, on_delete = models.CASCADE,", "makes this model to not show up directly on the", "often-used pattern class TimeStamedModel(models.Model): created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now_add=True)", "class Meta: abstract = True class Posts(TimeStamedModel): author = models.ForeignKey(", "= \"comment_author\" ) posts = models.ForeignKey( Posts, null = True,", "models.ImageField(blank=True) image_likes = models.ManyToManyField(user_model.User, related_name='post_image_likes') class Comments(TimeStamedModel): author = models.ForeignKey(", "on_delete = models.CASCADE, related_name = \"comment_author\" ) posts = models.ForeignKey(", "= models.ManyToManyField(user_model.User, related_name='post_image_likes') class Comments(TimeStamedModel): author = models.ForeignKey( user_model.User, null", "= True, on_delete = models.CASCADE, related_name = \"comment_post\" ) contents", "\"comment_author\" ) posts = models.ForeignKey( Posts, null = True, on_delete", ") posts = models.ForeignKey( Posts, null = True, on_delete =", "= True class Posts(TimeStamedModel): author = models.ForeignKey( user_model.User, null =", "related_name = \"post_author\" ) caption = models.TextField(blank=True) image = models.ImageField(blank=True)", "models.TextField(blank=True) image = models.ImageField(blank=True) image_likes = models.ManyToManyField(user_model.User, related_name='post_image_likes') class Comments(TimeStamedModel):", "class is used in other models as an inheritance. 
#", "= models.CASCADE, related_name = \"post_author\" ) caption = models.TextField(blank=True) image", "True class Posts(TimeStamedModel): author = models.ForeignKey( user_model.User, null = True,", "caption = models.TextField(blank=True) image = models.ImageField(blank=True) image_likes = models.ManyToManyField(user_model.User, related_name='post_image_likes')", "models.DateTimeField(auto_now_add=True) # An option that makes this model to not", "from django.db import models from djangostagram.users import models as user_model", "your models here. # This class is used in other", "user_model.User, null = True, on_delete = models.CASCADE, related_name = \"post_author\"", "this model to not show up directly on the database", "database class Meta: abstract = True class Posts(TimeStamedModel): author =", "= True, on_delete = models.CASCADE, related_name = \"post_author\" ) caption", ") caption = models.TextField(blank=True) image = models.ImageField(blank=True) image_likes = models.ManyToManyField(user_model.User,", "from djangostagram.users import models as user_model # Create your models", "in other models as an inheritance. # An often-used pattern", "An option that makes this model to not show up", "that makes this model to not show up directly on", "models as user_model # Create your models here. # This", "related_name='post_image_likes') class Comments(TimeStamedModel): author = models.ForeignKey( user_model.User, null = True,", "TimeStamedModel(models.Model): created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now_add=True) # An option", "pattern class TimeStamedModel(models.Model): created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now_add=True) #", "used in other models as an inheritance. # An often-used", "option that makes this model to not show up directly", "import models as user_model # Create your models here. 
#", "models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now_add=True) # An option that makes this", "null = True, on_delete = models.CASCADE, related_name = \"comment_post\" )", "models as an inheritance. # An often-used pattern class TimeStamedModel(models.Model):", "Create your models here. # This class is used in", "posts = models.ForeignKey( Posts, null = True, on_delete = models.CASCADE,", "abstract = True class Posts(TimeStamedModel): author = models.ForeignKey( user_model.User, null", "Posts, null = True, on_delete = models.CASCADE, related_name = \"comment_post\"", "class Posts(TimeStamedModel): author = models.ForeignKey( user_model.User, null = True, on_delete", "# An option that makes this model to not show", "an inheritance. # An often-used pattern class TimeStamedModel(models.Model): created_at =", "on_delete = models.CASCADE, related_name = \"comment_post\" ) contents = models.TextField(blank=True)", "model to not show up directly on the database class", "not show up directly on the database class Meta: abstract", "directly on the database class Meta: abstract = True class", "user_model.User, null = True, on_delete = models.CASCADE, related_name = \"comment_author\"", "related_name = \"comment_author\" ) posts = models.ForeignKey( Posts, null =", "other models as an inheritance. 
# An often-used pattern class", "the database class Meta: abstract = True class Posts(TimeStamedModel): author", "created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now_add=True) # An option that", "= models.CASCADE, related_name = \"comment_author\" ) posts = models.ForeignKey( Posts,", "on the database class Meta: abstract = True class Posts(TimeStamedModel):", "This class is used in other models as an inheritance.", "<reponame>hongsemy/InstagramWithDjango from django.db import models from djangostagram.users import models as", "= True, on_delete = models.CASCADE, related_name = \"comment_author\" ) posts", "= models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now_add=True) # An option that makes", "models.ForeignKey( Posts, null = True, on_delete = models.CASCADE, related_name =", "as an inheritance. # An often-used pattern class TimeStamedModel(models.Model): created_at", "null = True, on_delete = models.CASCADE, related_name = \"comment_author\" )", "is used in other models as an inheritance. # An", "django.db import models from djangostagram.users import models as user_model #", "up directly on the database class Meta: abstract = True", "# Create your models here. 
# This class is used", "djangostagram.users import models as user_model # Create your models here.", "null = True, on_delete = models.CASCADE, related_name = \"post_author\" )", "True, on_delete = models.CASCADE, related_name = \"comment_author\" ) posts =", "class TimeStamedModel(models.Model): created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now_add=True) # An", "models.ForeignKey( user_model.User, null = True, on_delete = models.CASCADE, related_name =", "class Comments(TimeStamedModel): author = models.ForeignKey( user_model.User, null = True, on_delete", "= models.ForeignKey( user_model.User, null = True, on_delete = models.CASCADE, related_name", "image = models.ImageField(blank=True) image_likes = models.ManyToManyField(user_model.User, related_name='post_image_likes') class Comments(TimeStamedModel): author", "to not show up directly on the database class Meta:", "models here. # This class is used in other models", "models.CASCADE, related_name = \"comment_author\" ) posts = models.ForeignKey( Posts, null", "Posts(TimeStamedModel): author = models.ForeignKey( user_model.User, null = True, on_delete =", "# This class is used in other models as an", "An often-used pattern class TimeStamedModel(models.Model): created_at = models.DateTimeField(auto_now_add=True) updated_at =", "updated_at = models.DateTimeField(auto_now_add=True) # An option that makes this model", "= models.DateTimeField(auto_now_add=True) # An option that makes this model to", "on_delete = models.CASCADE, related_name = \"post_author\" ) caption = models.TextField(blank=True)", "\"post_author\" ) caption = models.TextField(blank=True) image = models.ImageField(blank=True) image_likes =", "user_model # Create your models here. 
# This class is", "Comments(TimeStamedModel): author = models.ForeignKey( user_model.User, null = True, on_delete =", "= models.ImageField(blank=True) image_likes = models.ManyToManyField(user_model.User, related_name='post_image_likes') class Comments(TimeStamedModel): author =", "Meta: abstract = True class Posts(TimeStamedModel): author = models.ForeignKey( user_model.User,", "models from djangostagram.users import models as user_model # Create your", "= \"post_author\" ) caption = models.TextField(blank=True) image = models.ImageField(blank=True) image_likes" ]
[ "IWorkflowChangedEvent from guillotina.events import ObjectEvent from zope.interface import implementer @implementer(IWorkflowChangedEvent)", "guillotina.events import ObjectEvent from zope.interface import implementer @implementer(IWorkflowChangedEvent) class WorkflowChangedEvent(ObjectEvent):", "from guillotina.contrib.workflows.interfaces import IWorkflowChangedEvent from guillotina.events import ObjectEvent from zope.interface", "object) self.object = object self.workflow = workflow self.action = action", "__init__(self, object, workflow, action, comments): ObjectEvent.__init__(self, object) self.object = object", "import implementer @implementer(IWorkflowChangedEvent) class WorkflowChangedEvent(ObjectEvent): \"\"\"An object has been moved\"\"\"", "WorkflowChangedEvent(ObjectEvent): \"\"\"An object has been moved\"\"\" def __init__(self, object, workflow,", "import ObjectEvent from zope.interface import implementer @implementer(IWorkflowChangedEvent) class WorkflowChangedEvent(ObjectEvent): \"\"\"An", "moved\"\"\" def __init__(self, object, workflow, action, comments): ObjectEvent.__init__(self, object) self.object", "action, comments): ObjectEvent.__init__(self, object) self.object = object self.workflow = workflow", "from zope.interface import implementer @implementer(IWorkflowChangedEvent) class WorkflowChangedEvent(ObjectEvent): \"\"\"An object has", "ObjectEvent from zope.interface import implementer @implementer(IWorkflowChangedEvent) class WorkflowChangedEvent(ObjectEvent): \"\"\"An object", "class WorkflowChangedEvent(ObjectEvent): \"\"\"An object has been moved\"\"\" def __init__(self, object,", "self.object = object self.workflow = workflow self.action = action self.comments", "object has been moved\"\"\" def __init__(self, object, workflow, action, comments):", "been moved\"\"\" def __init__(self, object, workflow, action, comments): ObjectEvent.__init__(self, object)", "def __init__(self, object, workflow, action, comments): 
ObjectEvent.__init__(self, object) self.object =", "= object self.workflow = workflow self.action = action self.comments =", "import IWorkflowChangedEvent from guillotina.events import ObjectEvent from zope.interface import implementer", "ObjectEvent.__init__(self, object) self.object = object self.workflow = workflow self.action =", "from guillotina.events import ObjectEvent from zope.interface import implementer @implementer(IWorkflowChangedEvent) class", "comments): ObjectEvent.__init__(self, object) self.object = object self.workflow = workflow self.action", "zope.interface import implementer @implementer(IWorkflowChangedEvent) class WorkflowChangedEvent(ObjectEvent): \"\"\"An object has been", "workflow, action, comments): ObjectEvent.__init__(self, object) self.object = object self.workflow =", "guillotina.contrib.workflows.interfaces import IWorkflowChangedEvent from guillotina.events import ObjectEvent from zope.interface import", "object self.workflow = workflow self.action = action self.comments = comments", "\"\"\"An object has been moved\"\"\" def __init__(self, object, workflow, action,", "object, workflow, action, comments): ObjectEvent.__init__(self, object) self.object = object self.workflow", "@implementer(IWorkflowChangedEvent) class WorkflowChangedEvent(ObjectEvent): \"\"\"An object has been moved\"\"\" def __init__(self,", "implementer @implementer(IWorkflowChangedEvent) class WorkflowChangedEvent(ObjectEvent): \"\"\"An object has been moved\"\"\" def", "has been moved\"\"\" def __init__(self, object, workflow, action, comments): ObjectEvent.__init__(self," ]
[ "Validates the cleaning rule which deletes or updates the data", "cdr_cleaner.clean_cdr_engine as clean_engine ARGS = parser.parse_args() pipeline_logging.configure(level=logging.DEBUG, add_console_handler=True) if ARGS.list_queries:", "desc = \"Suppress COVID EHR vaccine concepts.\" super().__init__( issue_numbers=['DC1692'], description=desc,", "cleaning rule which deletes or updates the data from the", "join `{{project_id}}.{{dataset_id}}.concept_relationship` on c.concept_id = concept_id_1 where concept_id_2 in (select", "= 'RxNorm - SPL' and REGEXP_CONTAINS(concept_name, r'(?i)(COVID)')) ) ), concepts_via_ca", "= clean_engine.get_query_list( ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id, [(CovidEHRVaccineConceptSuppression,)]) for query in query_list:", "Method to run validation on cleaning rules that will be", "distinct select distinct * from concepts_via_cr \"\"\") class CovidEHRVaccineConceptSuppression(AbstractBqLookupTableConceptSuppression ):", "create_suppression_lookup_table(self, client): concept_suppression_lookup_query = COVID_VACCINE_CONCEPT_QUERY.render( project_id=self.project_id, dataset_id=self.dataset_id, sandbox_id=self.sandbox_dataset_id, concept_suppression_lookup_table=self. concept_suppression_lookup_table)", "concepts_via_ca union distinct select distinct * from concepts_via_cr \"\"\") class", "where ca.ancestor_concept_id in (select concept_id from covid_vacc) ) select distinct", "in ('PPI') ) OR ( -- done by code and", "FROM `{{project_id}}.{{dataset_id}}.concept` WHERE ( -- done by name and vocab", "-- done by code and vocab -- REGEXP_CONTAINS(concept_code, r'(91300)|(91301)|(91302)|(91303)|(91304)') and", "of values which adhere to a condition we are looking", "counts of the affected tables. Raises RunTimeError if the validation", "which adhere to a condition we are looking for. 
if", "done by code and vocab -- REGEXP_CONTAINS(concept_code, r'(91300)|(91301)|(91302)|(91303)|(91304)') and vocabulary_id", "updated no longer exists in the table. if your class", "hasattr(result, 'errors') and result.errors: LOGGER.error(f\"Error running job {result.job_id}: {result.errors}\") raise", "SPL' and REGEXP_CONTAINS(concept_name, r'(?i)(COVID)')) ) ), concepts_via_ca as ( select", "join `{{project_id}}.{{dataset_id}}.concept_ancestor` as ca on c.concept_id = ca.descendant_concept_id where ca.ancestor_concept_id", "= COVID_VACCINE_CONCEPT_QUERY.render( project_id=self.project_id, dataset_id=self.dataset_id, sandbox_id=self.sandbox_dataset_id, concept_suppression_lookup_table=self. concept_suppression_lookup_table) query_job = client.query(concept_suppression_lookup_query)", "implementing the validation that checks if the date time values", "\"\"\" Run required steps for validation setup Method to run", "we are looking for. if your class deletes a subset", "of the tables prior to applying cleaning rule \"\"\" raise", "left join `{{project_id}}.{{dataset_id}}.concept_ancestor` as ca on c.concept_id = ca.descendant_concept_id where", "a subset of rows in the tables you should be", "r'(91300)|(91301)|(91302)|(91303)|(91304)') and vocabulary_id = 'CPT4' ) ), concepts_via_cr as (", "import logging # Project imports from cdr_cleaner.cleaning_rules.deid.concept_suppression import AbstractBqLookupTableConceptSuppression from", "code and vocab -- REGEXP_CONTAINS(concept_code, r'(207)|(208)|(210)|(211)|(212)') and vocabulary_id = 'CVX'", "the logic to get the initial list of values which", "\"\"\" Suppress COVID EHR vaccine concepts. Original Issues: DC-1692 \"\"\"", "def validate_rule(self, client, *args, **keyword_args): \"\"\" Validates the cleaning rule", "run to setup validation on cleaning rules that will be", "setup Method to run to setup validation on cleaning rules", "are looking for. 
if your class deletes a subset of", "# and concept_id_1 not in (select concept_id from covid_vacc) and", "vaccine concepts. Original Issues: DC-1692 \"\"\" # Python imports import", "issue numbers, description and affected datasets. As other tickets may", "to initial row counts of the affected tables. Raises RunTimeError", "and vocab -- REGEXP_CONTAINS(concept_name, r'(?i)(COVID)') AND REGEXP_CONTAINS(concept_name, r'(?i)(VAC)') AND vocabulary_id", "CREATE OR REPLACE TABLE `{{project_id}}.{{sandbox_id}}.{{concept_suppression_lookup_table}}` AS with covid_vacc as (", "logic to get the initial list of values which adhere", "should be implementing the validation that checks if the count", "counts + deleted rows should equals to initial row counts", "if hasattr(result, 'errors') and result.errors: LOGGER.error(f\"Error running job {result.job_id}: {result.errors}\")", "__init__(self, project_id, dataset_id, sandbox_dataset_id, table_namer=None): \"\"\" Initialize the class with", "row counts of the affected tables. 
Raises RunTimeError if the", "# Third party imports from google.cloud.exceptions import GoogleCloudError LOGGER =", "Run required steps for validation setup Method to run to", "Project imports from cdr_cleaner.cleaning_rules.deid.concept_suppression import AbstractBqLookupTableConceptSuppression from constants.cdr_cleaner import clean_cdr", "on c.concept_id = ca.descendant_concept_id where ca.ancestor_concept_id in (select concept_id from", "CDM_TABLES from utils import pipeline_logging # Third party imports from", "- SPL') OR (relationship_id = 'RxNorm - SPL' and REGEXP_CONTAINS(concept_name,", "for query in query_list: LOGGER.info(query) else: clean_engine.add_console_logging(ARGS.console_log) clean_engine.clean_dataset(ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id,", "REPLACE TABLE `{{project_id}}.{{sandbox_id}}.{{concept_suppression_lookup_table}}` AS with covid_vacc as ( SELECT *", "of rows in the tables you should be implementing the", "counts of the tables prior to applying cleaning rule \"\"\"", "fix me.\") if __name__ == '__main__': import cdr_cleaner.args_parser as parser", "'RxNorm - SPL' and REGEXP_CONTAINS(concept_name, r'(?i)(COVID)')) ) ), concepts_via_ca as", "{result.job_id}: {result.errors}\") raise GoogleCloudError( f\"Error running job {result.job_id}: {result.errors}\") def", "row counts of the tables prior to applying cleaning rule", "dataset_id, sandbox_dataset_id, table_namer=None): \"\"\" Initialize the class with proper information.", "+ deleted rows should equals to initial row counts of", "OR (relationship_id = 'RxNorm - SPL' and REGEXP_CONTAINS(concept_name, r'(?i)(COVID)')) )", "tables you should be implementing the logic to get the", "with covid_vacc as ( SELECT * FROM `{{project_id}}.{{dataset_id}}.concept` WHERE (", "append them to the list of Jira Issues. 
DO NOT", "('Subsumes', 'RxNorm dose form of', 'Dose form group of', 'RxNorm", "steps for validation setup Method to run to setup validation", "distinct select distinct * from concepts_via_ca union distinct select distinct", "updates all the datetime fields you should be implementing the", "__name__ == '__main__': import cdr_cleaner.args_parser as parser import cdr_cleaner.clean_cdr_engine as", "ARGS = parser.parse_args() pipeline_logging.configure(level=logging.DEBUG, add_console_handler=True) if ARGS.list_queries: clean_engine.add_console_logging() query_list =", "( -- done by code and vocab -- REGEXP_CONTAINS(concept_code, r'(207)|(208)|(210)|(211)|(212)')", "project_id=self.project_id, dataset_id=self.dataset_id, sandbox_id=self.sandbox_dataset_id, concept_suppression_lookup_table=self. concept_suppression_lookup_table) query_job = client.query(concept_suppression_lookup_query) result =", "running job {result.job_id}: {result.errors}\") def validate_rule(self, client, *args, **keyword_args): \"\"\"", "AND REGEXP_CONTAINS(concept_name, r'(?i)(VAC)') AND vocabulary_id not in ('PPI') ) OR", "and REGEXP_CONTAINS(concept_name, r'(?i)(COVID)')) ) ), concepts_via_ca as ( select c.*", "implementing the logic to get the initial list of values", "rules that will be updating the values. For example: if", "from cdr_cleaner.cleaning_rules.deid.concept_suppression import AbstractBqLookupTableConceptSuppression from constants.cdr_cleaner import clean_cdr as cdr_consts", "As other tickets may affect this SQL, append them to", "to a condition we are looking for. 
if your class", "**keyword_args): \"\"\" Run required steps for validation setup Method to", "c.* from `{{project_id}}.{{dataset_id}}.concept`as c left join `{{project_id}}.{{dataset_id}}.concept_ancestor` as ca on", "distinct c.* from `{{project_id}}.{{dataset_id}}.concept`as c left join `{{project_id}}.{{dataset_id}}.concept_relationship` on c.concept_id", "`{{project_id}}.{{dataset_id}}.concept_relationship` on c.concept_id = concept_id_1 where concept_id_2 in (select concept_id", "* from concepts_via_ca union distinct select distinct * from concepts_via_cr", "ISSUE NUMBERS! \"\"\" desc = \"Suppress COVID EHR vaccine concepts.\"", "a condition we are looking for. if your class deletes", "deleted rows should equals to initial row counts of the", "= ca.descendant_concept_id where ca.ancestor_concept_id in (select concept_id from covid_vacc) )", "validation setup Method to run to setup validation on cleaning", "`{{project_id}}.{{dataset_id}}.concept` WHERE ( -- done by name and vocab --", "COVID_VACCINE_CONCEPT_QUERY.render( project_id=self.project_id, dataset_id=self.dataset_id, sandbox_id=self.sandbox_dataset_id, concept_suppression_lookup_table=self. concept_suppression_lookup_table) query_job = client.query(concept_suppression_lookup_query) result", "r'(?i)(COVID)')) ) ), concepts_via_ca as ( select c.* from `{{project_id}}.{{dataset_id}}.concept`as", "- SPL' and REGEXP_CONTAINS(concept_name, r'(?i)(COVID)')) ) ), concepts_via_ca as (", "and affected datasets. As other tickets may affect this SQL,", "deletes or updates the data from the tables Method to", "the data from the tables Method to run validation on", "Initialize the class with proper information. 
Set the issue numbers,", "add_console_handler=True) if ARGS.list_queries: clean_engine.add_console_logging() query_list = clean_engine.get_query_list( ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id,", "form group of', 'RxNorm - SPL') OR (relationship_id = 'RxNorm", "import GoogleCloudError LOGGER = logging.getLogger(__name__) SUPPRESSION_RULE_CONCEPT_TABLE = 'covid_vaccine_concepts' COVID_VACCINE_CONCEPT_QUERY =", "super().__init__( issue_numbers=['DC1692'], description=desc, affected_datasets=[cdr_consts.REGISTERED_TIER_DEID], affected_tables=CDM_TABLES, project_id=project_id, dataset_id=dataset_id, sandbox_dataset_id=sandbox_dataset_id, concept_suppression_lookup_table=SUPPRESSION_RULE_CONCEPT_TABLE, table_namer=table_namer)", "from the tables Method to run validation on cleaning rules", "logic to get the row counts of the tables prior", "c.concept_id = concept_id_1 where concept_id_2 in (select concept_id from covid_vacc)", "# Python imports import logging # Project imports from cdr_cleaner.cleaning_rules.deid.concept_suppression", "job {result.job_id}: {result.errors}\") def validate_rule(self, client, *args, **keyword_args): \"\"\" Validates", "clean_engine.get_query_list( ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id, [(CovidEHRVaccineConceptSuppression,)]) for query in query_list: LOGGER.info(query)", "`{{project_id}}.{{dataset_id}}.concept`as c left join `{{project_id}}.{{dataset_id}}.concept_relationship` on c.concept_id = concept_id_1 where", "as parser import cdr_cleaner.clean_cdr_engine as clean_engine ARGS = parser.parse_args() pipeline_logging.configure(level=logging.DEBUG,", "covid_vacc) ) select distinct * from covid_vacc union distinct select", "the values. For example: if your class updates all the", "to be updated no longer exists in the table. if", "= 'CVX' ) OR ( -- done by code and", "datasets. 
As other tickets may affect this SQL, append them", "clean_engine ARGS = parser.parse_args() pipeline_logging.configure(level=logging.DEBUG, add_console_handler=True) if ARGS.list_queries: clean_engine.add_console_logging() query_list", "class with proper information. Set the issue numbers, description and", "updating the values. For example: if your class updates all", "the table. if your class deletes a subset of rows", "distinct * from covid_vacc union distinct select distinct * from", "from google.cloud.exceptions import GoogleCloudError LOGGER = logging.getLogger(__name__) SUPPRESSION_RULE_CONCEPT_TABLE = 'covid_vaccine_concepts'", "me.\") def setup_validation(self, client, *args, **keyword_args): \"\"\" Run required steps", "all the datetime fields you should be implementing the validation", "done by code and vocab -- REGEXP_CONTAINS(concept_code, r'(207)|(208)|(210)|(211)|(212)') and vocabulary_id", "pipeline_logging.configure(level=logging.DEBUG, add_console_handler=True) if ARGS.list_queries: clean_engine.add_console_logging() query_list = clean_engine.get_query_list( ARGS.project_id, ARGS.dataset_id,", "count of final final row counts + deleted rows should", "vocab -- REGEXP_CONTAINS(concept_name, r'(?i)(COVID)') AND REGEXP_CONTAINS(concept_name, r'(?i)(VAC)') AND vocabulary_id not", "to run to setup validation on cleaning rules that will", "\"\"\" Validates the cleaning rule which deletes or updates the", "rows in the tables you should be implementing the logic", "that needs to be updated no longer exists in the", "REGEXP_CONTAINS(concept_name, r'(?i)(COVID)')) ) ), concepts_via_ca as ( select c.* from", "datetime fields you should be implementing the validation that checks", "in ('Subsumes', 'RxNorm dose form of', 'Dose form group of',", "required steps for validation setup Method to run to setup", "= concept_id_1 where concept_id_2 in (select concept_id from covid_vacc) #", "concept_id_2 in (select concept_id from covid_vacc) # and concept_id_1 not", 
"GoogleCloudError LOGGER = logging.getLogger(__name__) SUPPRESSION_RULE_CONCEPT_TABLE = 'covid_vaccine_concepts' COVID_VACCINE_CONCEPT_QUERY = JINJA_ENV.from_string(\"\"\"", "('PPI') ) OR ( -- done by code and vocab", "if ARGS.list_queries: clean_engine.add_console_logging() query_list = clean_engine.get_query_list( ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id, [(CovidEHRVaccineConceptSuppression,)])", "be implementing the validation that checks if the date time", "from utils import pipeline_logging # Third party imports from google.cloud.exceptions", "= 'covid_vaccine_concepts' COVID_VACCINE_CONCEPT_QUERY = JINJA_ENV.from_string(\"\"\" CREATE OR REPLACE TABLE `{{project_id}}.{{sandbox_id}}.{{concept_suppression_lookup_table}}`", "Suppress COVID EHR vaccine concepts. Original Issues: DC-1692 \"\"\" #", "description and affected datasets. As other tickets may affect this", "deletes a subset of rows in the tables you should", "values. For example: if your class updates all the datetime", "= parser.parse_args() pipeline_logging.configure(level=logging.DEBUG, add_console_handler=True) if ARGS.list_queries: clean_engine.add_console_logging() query_list = clean_engine.get_query_list(", "if the count of final final row counts + deleted", "*args, **keyword_args): \"\"\" Validates the cleaning rule which deletes or", "tables. Raises RunTimeError if the validation fails. \"\"\" raise NotImplementedError(\"Please", "COVID_VACCINE_CONCEPT_QUERY = JINJA_ENV.from_string(\"\"\" CREATE OR REPLACE TABLE `{{project_id}}.{{sandbox_id}}.{{concept_suppression_lookup_table}}` AS with", "of', 'RxNorm - SPL') OR (relationship_id = 'RxNorm - SPL'", "that will be updating the values. For example: if your", "), concepts_via_ca as ( select c.* from `{{project_id}}.{{dataset_id}}.concept`as c left", "RunTimeError if the validation fails. \"\"\" raise NotImplementedError(\"Please fix me.\")", "final row counts + deleted rows should equals to initial", "validation fails. 
\"\"\" raise NotImplementedError(\"Please fix me.\") def setup_validation(self, client,", "that checks if the count of final final row counts", "def __init__(self, project_id, dataset_id, sandbox_dataset_id, table_namer=None): \"\"\" Initialize the class", "affected_datasets=[cdr_consts.REGISTERED_TIER_DEID], affected_tables=CDM_TABLES, project_id=project_id, dataset_id=dataset_id, sandbox_dataset_id=sandbox_dataset_id, concept_suppression_lookup_table=SUPPRESSION_RULE_CONCEPT_TABLE, table_namer=table_namer) def create_suppression_lookup_table(self, client):", "`{{project_id}}.{{dataset_id}}.concept_ancestor` as ca on c.concept_id = ca.descendant_concept_id where ca.ancestor_concept_id in", "rows should equals to initial row counts of the affected", "not in ('PPI') ) OR ( -- done by code", "the cleaning rule which deletes or updates the data from", "rule \"\"\" raise NotImplementedError(\"Please fix me.\") if __name__ == '__main__':", "'CVX' ) OR ( -- done by code and vocab", "in the table. if your class deletes a subset of", "row counts + deleted rows should equals to initial row", "raise GoogleCloudError( f\"Error running job {result.job_id}: {result.errors}\") def validate_rule(self, client,", "will be updating the values. For example: if your class", ") OR ( -- done by code and vocab --", "of final final row counts + deleted rows should equals", "get the initial list of values which adhere to a", "me.\") if __name__ == '__main__': import cdr_cleaner.args_parser as parser import", "\"\"\") class CovidEHRVaccineConceptSuppression(AbstractBqLookupTableConceptSuppression ): def __init__(self, project_id, dataset_id, sandbox_dataset_id, table_namer=None):", "the date time values that needs to be updated no", "list of Jira Issues. DO NOT REMOVE ORIGINAL JIRA ISSUE", "LOGGER.error(f\"Error running job {result.job_id}: {result.errors}\") raise GoogleCloudError( f\"Error running job", "the list of Jira Issues. 
DO NOT REMOVE ORIGINAL JIRA", "concept_id_1 where concept_id_2 in (select concept_id from covid_vacc) # and", "TABLE `{{project_id}}.{{sandbox_id}}.{{concept_suppression_lookup_table}}` AS with covid_vacc as ( SELECT * FROM", "code and vocab -- REGEXP_CONTAINS(concept_code, r'(91300)|(91301)|(91302)|(91303)|(91304)') and vocabulary_id = 'CPT4'", "c left join `{{project_id}}.{{dataset_id}}.concept_relationship` on c.concept_id = concept_id_1 where concept_id_2", "SQL, append them to the list of Jira Issues. DO", "to setup validation on cleaning rules that will be updating", "rule which deletes or updates the data from the tables", "applying cleaning rule \"\"\" raise NotImplementedError(\"Please fix me.\") if __name__", "where concept_id_2 in (select concept_id from covid_vacc) # and concept_id_1", "form of', 'Dose form group of', 'RxNorm - SPL') OR", "to get the row counts of the tables prior to", "{result.job_id}: {result.errors}\") def validate_rule(self, client, *args, **keyword_args): \"\"\" Validates the", "from covid_vacc) and ( relationship_id not in ('Subsumes', 'RxNorm dose", "fields you should be implementing the logic to get the", "concepts_via_cr \"\"\") class CovidEHRVaccineConceptSuppression(AbstractBqLookupTableConceptSuppression ): def __init__(self, project_id, dataset_id, sandbox_dataset_id,", "project_id=project_id, dataset_id=dataset_id, sandbox_dataset_id=sandbox_dataset_id, concept_suppression_lookup_table=SUPPRESSION_RULE_CONCEPT_TABLE, table_namer=table_namer) def create_suppression_lookup_table(self, client): concept_suppression_lookup_query =", "EHR vaccine concepts. Original Issues: DC-1692 \"\"\" # Python imports", "JINJA_ENV, CDM_TABLES from utils import pipeline_logging # Third party imports", "information. Set the issue numbers, description and affected datasets. 
As", "which deletes or updates the data from the tables Method", "clean_engine.add_console_logging() query_list = clean_engine.get_query_list( ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id, [(CovidEHRVaccineConceptSuppression,)]) for query", "checks if the date time values that needs to be", "-- REGEXP_CONTAINS(concept_name, r'(?i)(COVID)') AND REGEXP_CONTAINS(concept_name, r'(?i)(VAC)') AND vocabulary_id not in", "Raises RunTimeError if the validation fails. \"\"\" raise NotImplementedError(\"Please fix", "validation that checks if the count of final final row", "`{{project_id}}.{{sandbox_id}}.{{concept_suppression_lookup_table}}` AS with covid_vacc as ( SELECT * FROM `{{project_id}}.{{dataset_id}}.concept`", "(select concept_id from covid_vacc) # and concept_id_1 not in (select", "on c.concept_id = concept_id_1 where concept_id_2 in (select concept_id from", "checks if the count of final final row counts +", "be updating the values. For example: if your class updates", "fields you should be implementing the validation that checks if", "= query_job.result() if hasattr(result, 'errors') and result.errors: LOGGER.error(f\"Error running job", "NotImplementedError(\"Please fix me.\") def setup_validation(self, client, *args, **keyword_args): \"\"\" Run", "concept_suppression_lookup_query = COVID_VACCINE_CONCEPT_QUERY.render( project_id=self.project_id, dataset_id=self.dataset_id, sandbox_id=self.sandbox_dataset_id, concept_suppression_lookup_table=self. concept_suppression_lookup_table) query_job =", "run validation on cleaning rules that will be updating the", "def create_suppression_lookup_table(self, client): concept_suppression_lookup_query = COVID_VACCINE_CONCEPT_QUERY.render( project_id=self.project_id, dataset_id=self.dataset_id, sandbox_id=self.sandbox_dataset_id, concept_suppression_lookup_table=self.", "will be updating or deleting the values. 
For example: if", "* FROM `{{project_id}}.{{dataset_id}}.concept` WHERE ( -- done by name and", "the datetime fields you should be implementing the logic to", "client): concept_suppression_lookup_query = COVID_VACCINE_CONCEPT_QUERY.render( project_id=self.project_id, dataset_id=self.dataset_id, sandbox_id=self.sandbox_dataset_id, concept_suppression_lookup_table=self. concept_suppression_lookup_table) query_job", "subset of rows in the tables you should be implementing", "CovidEHRVaccineConceptSuppression(AbstractBqLookupTableConceptSuppression ): def __init__(self, project_id, dataset_id, sandbox_dataset_id, table_namer=None): \"\"\" Initialize", "cdr_cleaner.args_parser as parser import cdr_cleaner.clean_cdr_engine as clean_engine ARGS = parser.parse_args()", "or deleting the values. For example: if your class updates", "and ( relationship_id not in ('Subsumes', 'RxNorm dose form of',", "looking for. if your class deletes a subset of rows", "be implementing the logic to get the initial list of", "def setup_validation(self, client, *args, **keyword_args): \"\"\" Run required steps for", "numbers, description and affected datasets. As other tickets may affect", "concept_id_1 not in (select concept_id from covid_vacc) and ( relationship_id", "get the row counts of the tables prior to applying", "sandbox_dataset_id, table_namer=None): \"\"\" Initialize the class with proper information. Set", "dose form of', 'Dose form group of', 'RxNorm - SPL')", "the validation fails. \"\"\" raise NotImplementedError(\"Please fix me.\") def setup_validation(self,", "AbstractBqLookupTableConceptSuppression from constants.cdr_cleaner import clean_cdr as cdr_consts from common import", "table_namer=table_namer) def create_suppression_lookup_table(self, client): concept_suppression_lookup_query = COVID_VACCINE_CONCEPT_QUERY.render( project_id=self.project_id, dataset_id=self.dataset_id, sandbox_id=self.sandbox_dataset_id,", "if the validation fails. 
\"\"\" raise NotImplementedError(\"Please fix me.\") def", "validation that checks if the date time values that needs", "SPL') OR (relationship_id = 'RxNorm - SPL' and REGEXP_CONTAINS(concept_name, r'(?i)(COVID)'))", "\"\"\" desc = \"Suppress COVID EHR vaccine concepts.\" super().__init__( issue_numbers=['DC1692'],", "done by name and vocab -- REGEXP_CONTAINS(concept_name, r'(?i)(COVID)') AND REGEXP_CONTAINS(concept_name,", "rows in the tables you should be implementing the validation", "from concepts_via_cr \"\"\") class CovidEHRVaccineConceptSuppression(AbstractBqLookupTableConceptSuppression ): def __init__(self, project_id, dataset_id,", "validate_rule(self, client, *args, **keyword_args): \"\"\" Validates the cleaning rule which", "\"Suppress COVID EHR vaccine concepts.\" super().__init__( issue_numbers=['DC1692'], description=desc, affected_datasets=[cdr_consts.REGISTERED_TIER_DEID], affected_tables=CDM_TABLES,", "import pipeline_logging # Third party imports from google.cloud.exceptions import GoogleCloudError", "AND vocabulary_id not in ('PPI') ) OR ( -- done", "pipeline_logging # Third party imports from google.cloud.exceptions import GoogleCloudError LOGGER", "by code and vocab -- REGEXP_CONTAINS(concept_code, r'(207)|(208)|(210)|(211)|(212)') and vocabulary_id =", "Method to run to setup validation on cleaning rules that", "should be implementing the logic to get the initial list", "\"\"\" Initialize the class with proper information. Set the issue", "-- REGEXP_CONTAINS(concept_code, r'(207)|(208)|(210)|(211)|(212)') and vocabulary_id = 'CVX' ) OR (", "your class deletes a subset of rows in the tables", "with proper information. 
Set the issue numbers, description and affected", "cdr_cleaner.cleaning_rules.deid.concept_suppression import AbstractBqLookupTableConceptSuppression from constants.cdr_cleaner import clean_cdr as cdr_consts from", "select c.* from `{{project_id}}.{{dataset_id}}.concept`as c left join `{{project_id}}.{{dataset_id}}.concept_ancestor` as ca", "from concepts_via_ca union distinct select distinct * from concepts_via_cr \"\"\")", "the tables you should be implementing the validation that checks", "concept_suppression_lookup_table) query_job = client.query(concept_suppression_lookup_query) result = query_job.result() if hasattr(result, 'errors')", "final final row counts + deleted rows should equals to", "dataset_id=self.dataset_id, sandbox_id=self.sandbox_dataset_id, concept_suppression_lookup_table=self. concept_suppression_lookup_table) query_job = client.query(concept_suppression_lookup_query) result = query_job.result()", "covid_vacc) and ( relationship_id not in ('Subsumes', 'RxNorm dose form", "imports from google.cloud.exceptions import GoogleCloudError LOGGER = logging.getLogger(__name__) SUPPRESSION_RULE_CONCEPT_TABLE =", "initial list of values which adhere to a condition we", "concepts.\" super().__init__( issue_numbers=['DC1692'], description=desc, affected_datasets=[cdr_consts.REGISTERED_TIER_DEID], affected_tables=CDM_TABLES, project_id=project_id, dataset_id=dataset_id, sandbox_dataset_id=sandbox_dataset_id, concept_suppression_lookup_table=SUPPRESSION_RULE_CONCEPT_TABLE,", "no longer exists in the table. 
if your class deletes", "OR REPLACE TABLE `{{project_id}}.{{sandbox_id}}.{{concept_suppression_lookup_table}}` AS with covid_vacc as ( SELECT", "class CovidEHRVaccineConceptSuppression(AbstractBqLookupTableConceptSuppression ): def __init__(self, project_id, dataset_id, sandbox_dataset_id, table_namer=None): \"\"\"", "DC-1692 \"\"\" # Python imports import logging # Project imports", "dataset_id=dataset_id, sandbox_dataset_id=sandbox_dataset_id, concept_suppression_lookup_table=SUPPRESSION_RULE_CONCEPT_TABLE, table_namer=table_namer) def create_suppression_lookup_table(self, client): concept_suppression_lookup_query = COVID_VACCINE_CONCEPT_QUERY.render(", "the issue numbers, description and affected datasets. As other tickets", "( -- done by code and vocab -- REGEXP_CONTAINS(concept_code, r'(91300)|(91301)|(91302)|(91303)|(91304)')", "you should be implementing the logic to get the row", "select distinct * from concepts_via_ca union distinct select distinct *", "(select concept_id from covid_vacc) and ( relationship_id not in ('Subsumes',", "class updates all the datetime fields you should be implementing", "distinct * from concepts_via_ca union distinct select distinct * from", "c left join `{{project_id}}.{{dataset_id}}.concept_ancestor` as ca on c.concept_id = ca.descendant_concept_id", "Python imports import logging # Project imports from cdr_cleaner.cleaning_rules.deid.concept_suppression import", "Issues. DO NOT REMOVE ORIGINAL JIRA ISSUE NUMBERS! \"\"\" desc", "as cdr_consts from common import JINJA_ENV, CDM_TABLES from utils import", "and concept_id_1 not in (select concept_id from covid_vacc) and (", "needs to be updated no longer exists in the table.", "concept_id from covid_vacc) and ( relationship_id not in ('Subsumes', 'RxNorm", "or updates the data from the tables Method to run", "from covid_vacc union distinct select distinct * from concepts_via_ca union", "sandbox_id=self.sandbox_dataset_id, concept_suppression_lookup_table=self. 
concept_suppression_lookup_table) query_job = client.query(concept_suppression_lookup_query) result = query_job.result() if", "), concepts_via_cr as ( select distinct c.* from `{{project_id}}.{{dataset_id}}.concept`as c", "that will be updating or deleting the values. For example:", "'RxNorm dose form of', 'Dose form group of', 'RxNorm -", "( SELECT * FROM `{{project_id}}.{{dataset_id}}.concept` WHERE ( -- done by", "'Dose form group of', 'RxNorm - SPL') OR (relationship_id =", "{result.errors}\") raise GoogleCloudError( f\"Error running job {result.job_id}: {result.errors}\") def validate_rule(self,", "tickets may affect this SQL, append them to the list", "logging # Project imports from cdr_cleaner.cleaning_rules.deid.concept_suppression import AbstractBqLookupTableConceptSuppression from constants.cdr_cleaner", "NUMBERS! \"\"\" desc = \"Suppress COVID EHR vaccine concepts.\" super().__init__(", "relationship_id not in ('Subsumes', 'RxNorm dose form of', 'Dose form", "concepts_via_ca as ( select c.* from `{{project_id}}.{{dataset_id}}.concept`as c left join", "setup_validation(self, client, *args, **keyword_args): \"\"\" Run required steps for validation", "= 'CPT4' ) ), concepts_via_cr as ( select distinct c.*", "initial row counts of the affected tables. 
Raises RunTimeError if", "by name and vocab -- REGEXP_CONTAINS(concept_name, r'(?i)(COVID)') AND REGEXP_CONTAINS(concept_name, r'(?i)(VAC)')", "r'(?i)(COVID)') AND REGEXP_CONTAINS(concept_name, r'(?i)(VAC)') AND vocabulary_id not in ('PPI') )", "client.query(concept_suppression_lookup_query) result = query_job.result() if hasattr(result, 'errors') and result.errors: LOGGER.error(f\"Error", "left join `{{project_id}}.{{dataset_id}}.concept_relationship` on c.concept_id = concept_id_1 where concept_id_2 in", "select distinct * from concepts_via_cr \"\"\") class CovidEHRVaccineConceptSuppression(AbstractBqLookupTableConceptSuppression ): def", "'RxNorm - SPL') OR (relationship_id = 'RxNorm - SPL' and", "covid_vacc) # and concept_id_1 not in (select concept_id from covid_vacc)", "( -- done by name and vocab -- REGEXP_CONTAINS(concept_name, r'(?i)(COVID)')", "list of values which adhere to a condition we are", "**keyword_args): \"\"\" Validates the cleaning rule which deletes or updates", "from `{{project_id}}.{{dataset_id}}.concept`as c left join `{{project_id}}.{{dataset_id}}.concept_relationship` on c.concept_id = concept_id_1", "ca on c.concept_id = ca.descendant_concept_id where ca.ancestor_concept_id in (select concept_id", "and result.errors: LOGGER.error(f\"Error running job {result.job_id}: {result.errors}\") raise GoogleCloudError( f\"Error", "= logging.getLogger(__name__) SUPPRESSION_RULE_CONCEPT_TABLE = 'covid_vaccine_concepts' COVID_VACCINE_CONCEPT_QUERY = JINJA_ENV.from_string(\"\"\" CREATE OR", "concepts. 
Original Issues: DC-1692 \"\"\" # Python imports import logging", "and vocab -- REGEXP_CONTAINS(concept_code, r'(207)|(208)|(210)|(211)|(212)') and vocabulary_id = 'CVX' )", "vocab -- REGEXP_CONTAINS(concept_code, r'(207)|(208)|(210)|(211)|(212)') and vocabulary_id = 'CVX' ) OR", "in the tables you should be implementing the logic to", "validation on cleaning rules that will be updating the values.", "you should be implementing the logic to get the initial", "longer exists in the table. if your class deletes a", "For example: if your class updates all the datetime fields", "NotImplementedError(\"Please fix me.\") if __name__ == '__main__': import cdr_cleaner.args_parser as", "cleaning rules that will be updating the values. For example:", "-- REGEXP_CONTAINS(concept_code, r'(91300)|(91301)|(91302)|(91303)|(91304)') and vocabulary_id = 'CPT4' ) ), concepts_via_cr", "REGEXP_CONTAINS(concept_code, r'(91300)|(91301)|(91302)|(91303)|(91304)') and vocabulary_id = 'CPT4' ) ), concepts_via_cr as", "r'(?i)(VAC)') AND vocabulary_id not in ('PPI') ) OR ( --", "the initial list of values which adhere to a condition", "rules that will be updating or deleting the values. For", "for. if your class deletes a subset of rows in", "c.concept_id = ca.descendant_concept_id where ca.ancestor_concept_id in (select concept_id from covid_vacc)", "updates the data from the tables Method to run validation", "vocabulary_id = 'CVX' ) OR ( -- done by code", "fails. \"\"\" raise NotImplementedError(\"Please fix me.\") def setup_validation(self, client, *args,", "ARGS.sandbox_dataset_id, [(CovidEHRVaccineConceptSuppression,)]) for query in query_list: LOGGER.info(query) else: clean_engine.add_console_logging(ARGS.console_log) clean_engine.clean_dataset(ARGS.project_id,", "clean_cdr as cdr_consts from common import JINJA_ENV, CDM_TABLES from utils", "WHERE ( -- done by name and vocab -- REGEXP_CONTAINS(concept_name,", "condition we are looking for. 
if your class deletes a", "* from concepts_via_cr \"\"\") class CovidEHRVaccineConceptSuppression(AbstractBqLookupTableConceptSuppression ): def __init__(self, project_id,", "to the list of Jira Issues. DO NOT REMOVE ORIGINAL", "them to the list of Jira Issues. DO NOT REMOVE", "SELECT * FROM `{{project_id}}.{{dataset_id}}.concept` WHERE ( -- done by name", "job {result.job_id}: {result.errors}\") raise GoogleCloudError( f\"Error running job {result.job_id}: {result.errors}\")", "google.cloud.exceptions import GoogleCloudError LOGGER = logging.getLogger(__name__) SUPPRESSION_RULE_CONCEPT_TABLE = 'covid_vaccine_concepts' COVID_VACCINE_CONCEPT_QUERY", "( select c.* from `{{project_id}}.{{dataset_id}}.concept`as c left join `{{project_id}}.{{dataset_id}}.concept_ancestor` as", "not in ('Subsumes', 'RxNorm dose form of', 'Dose form group", "the logic to get the row counts of the tables", "query_job = client.query(concept_suppression_lookup_query) result = query_job.result() if hasattr(result, 'errors') and", "all the datetime fields you should be implementing the logic", "exists in the table. if your class deletes a subset", "r'(207)|(208)|(210)|(211)|(212)') and vocabulary_id = 'CVX' ) OR ( -- done", "import clean_cdr as cdr_consts from common import JINJA_ENV, CDM_TABLES from", "other tickets may affect this SQL, append them to the", "LOGGER = logging.getLogger(__name__) SUPPRESSION_RULE_CONCEPT_TABLE = 'covid_vaccine_concepts' COVID_VACCINE_CONCEPT_QUERY = JINJA_ENV.from_string(\"\"\" CREATE", "from `{{project_id}}.{{dataset_id}}.concept`as c left join `{{project_id}}.{{dataset_id}}.concept_ancestor` as ca on c.concept_id", "class deletes a subset of rows in the tables you", "the tables prior to applying cleaning rule \"\"\" raise NotImplementedError(\"Please", "the affected tables. Raises RunTimeError if the validation fails. 
\"\"\"", "as ( select distinct c.* from `{{project_id}}.{{dataset_id}}.concept`as c left join", "'errors') and result.errors: LOGGER.error(f\"Error running job {result.job_id}: {result.errors}\") raise GoogleCloudError(", "equals to initial row counts of the affected tables. Raises", "that checks if the date time values that needs to", "( relationship_id not in ('Subsumes', 'RxNorm dose form of', 'Dose", "issue_numbers=['DC1692'], description=desc, affected_datasets=[cdr_consts.REGISTERED_TIER_DEID], affected_tables=CDM_TABLES, project_id=project_id, dataset_id=dataset_id, sandbox_dataset_id=sandbox_dataset_id, concept_suppression_lookup_table=SUPPRESSION_RULE_CONCEPT_TABLE, table_namer=table_namer) def", "(relationship_id = 'RxNorm - SPL' and REGEXP_CONTAINS(concept_name, r'(?i)(COVID)')) ) ),", "proper information. Set the issue numbers, description and affected datasets.", "name and vocab -- REGEXP_CONTAINS(concept_name, r'(?i)(COVID)') AND REGEXP_CONTAINS(concept_name, r'(?i)(VAC)') AND", "select distinct * from covid_vacc union distinct select distinct *", "and vocab -- REGEXP_CONTAINS(concept_code, r'(91300)|(91301)|(91302)|(91303)|(91304)') and vocabulary_id = 'CPT4' )", "and vocabulary_id = 'CPT4' ) ), concepts_via_cr as ( select", "if your class deletes a subset of rows in the", "COVID EHR vaccine concepts. 
Original Issues: DC-1692 \"\"\" # Python", "ARGS.list_queries: clean_engine.add_console_logging() query_list = clean_engine.get_query_list( ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id, [(CovidEHRVaccineConceptSuppression,)]) for", "Third party imports from google.cloud.exceptions import GoogleCloudError LOGGER = logging.getLogger(__name__)", "AS with covid_vacc as ( SELECT * FROM `{{project_id}}.{{dataset_id}}.concept` WHERE", "= JINJA_ENV.from_string(\"\"\" CREATE OR REPLACE TABLE `{{project_id}}.{{sandbox_id}}.{{concept_suppression_lookup_table}}` AS with covid_vacc", "= client.query(concept_suppression_lookup_query) result = query_job.result() if hasattr(result, 'errors') and result.errors:", "constants.cdr_cleaner import clean_cdr as cdr_consts from common import JINJA_ENV, CDM_TABLES", "# Project imports from cdr_cleaner.cleaning_rules.deid.concept_suppression import AbstractBqLookupTableConceptSuppression from constants.cdr_cleaner import", "affected tables. Raises RunTimeError if the validation fails. \"\"\" raise", "from common import JINJA_ENV, CDM_TABLES from utils import pipeline_logging #", "vocab -- REGEXP_CONTAINS(concept_code, r'(91300)|(91301)|(91302)|(91303)|(91304)') and vocabulary_id = 'CPT4' ) ),", "for validation setup Method to run to setup validation on", "implementing the validation that checks if the count of final", "REGEXP_CONTAINS(concept_name, r'(?i)(VAC)') AND vocabulary_id not in ('PPI') ) OR (", "JINJA_ENV.from_string(\"\"\" CREATE OR REPLACE TABLE `{{project_id}}.{{sandbox_id}}.{{concept_suppression_lookup_table}}` AS with covid_vacc as", "c.* from `{{project_id}}.{{dataset_id}}.concept`as c left join `{{project_id}}.{{dataset_id}}.concept_relationship` on c.concept_id =", "prior to applying cleaning rule \"\"\" raise NotImplementedError(\"Please fix me.\")", "query_job.result() if hasattr(result, 'errors') and result.errors: LOGGER.error(f\"Error running job {result.job_id}:", "of the affected tables. 
Raises RunTimeError if the validation fails.", ") ), concepts_via_cr as ( select distinct c.* from `{{project_id}}.{{dataset_id}}.concept`as", "REMOVE ORIGINAL JIRA ISSUE NUMBERS! \"\"\" desc = \"Suppress COVID", "not in (select concept_id from covid_vacc) and ( relationship_id not", "concept_id from covid_vacc) # and concept_id_1 not in (select concept_id", "implementing the logic to get the row counts of the", "table. if your class deletes a subset of rows in", ") select distinct * from covid_vacc union distinct select distinct", "as clean_engine ARGS = parser.parse_args() pipeline_logging.configure(level=logging.DEBUG, add_console_handler=True) if ARGS.list_queries: clean_engine.add_console_logging()", "be updated no longer exists in the table. if your", "import JINJA_ENV, CDM_TABLES from utils import pipeline_logging # Third party", "-- done by name and vocab -- REGEXP_CONTAINS(concept_name, r'(?i)(COVID)') AND", "of Jira Issues. DO NOT REMOVE ORIGINAL JIRA ISSUE NUMBERS!", "the count of final final row counts + deleted rows", "EHR vaccine concepts.\" super().__init__( issue_numbers=['DC1692'], description=desc, affected_datasets=[cdr_consts.REGISTERED_TIER_DEID], affected_tables=CDM_TABLES, project_id=project_id, dataset_id=dataset_id,", "date time values that needs to be updated no longer", "covid_vacc union distinct select distinct * from concepts_via_ca union distinct", "the validation that checks if the count of final final", "[(CovidEHRVaccineConceptSuppression,)]) for query in query_list: LOGGER.info(query) else: clean_engine.add_console_logging(ARGS.console_log) clean_engine.clean_dataset(ARGS.project_id, ARGS.dataset_id,", "ca.descendant_concept_id where ca.ancestor_concept_id in (select concept_id from covid_vacc) ) select", "distinct * from concepts_via_cr \"\"\") class CovidEHRVaccineConceptSuppression(AbstractBqLookupTableConceptSuppression ): def __init__(self,", "the class with proper information. 
Set the issue numbers, description", "the datetime fields you should be implementing the validation that", "vaccine concepts.\" super().__init__( issue_numbers=['DC1692'], description=desc, affected_datasets=[cdr_consts.REGISTERED_TIER_DEID], affected_tables=CDM_TABLES, project_id=project_id, dataset_id=dataset_id, sandbox_dataset_id=sandbox_dataset_id,", "affected_tables=CDM_TABLES, project_id=project_id, dataset_id=dataset_id, sandbox_dataset_id=sandbox_dataset_id, concept_suppression_lookup_table=SUPPRESSION_RULE_CONCEPT_TABLE, table_namer=table_namer) def create_suppression_lookup_table(self, client): concept_suppression_lookup_query", "{result.errors}\") def validate_rule(self, client, *args, **keyword_args): \"\"\" Validates the cleaning", ") ), concepts_via_ca as ( select c.* from `{{project_id}}.{{dataset_id}}.concept`as c", "raise NotImplementedError(\"Please fix me.\") if __name__ == '__main__': import cdr_cleaner.args_parser", "'covid_vaccine_concepts' COVID_VACCINE_CONCEPT_QUERY = JINJA_ENV.from_string(\"\"\" CREATE OR REPLACE TABLE `{{project_id}}.{{sandbox_id}}.{{concept_suppression_lookup_table}}` AS", "*args, **keyword_args): \"\"\" Run required steps for validation setup Method", "import AbstractBqLookupTableConceptSuppression from constants.cdr_cleaner import clean_cdr as cdr_consts from common", "'__main__': import cdr_cleaner.args_parser as parser import cdr_cleaner.clean_cdr_engine as clean_engine ARGS", "as ( SELECT * FROM `{{project_id}}.{{dataset_id}}.concept` WHERE ( -- done", "ca.ancestor_concept_id in (select concept_id from covid_vacc) ) select distinct *", "concept_suppression_lookup_table=self. 
concept_suppression_lookup_table) query_job = client.query(concept_suppression_lookup_query) result = query_job.result() if hasattr(result,", "query in query_list: LOGGER.info(query) else: clean_engine.add_console_logging(ARGS.console_log) clean_engine.clean_dataset(ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id, [(CovidEHRVaccineConceptSuppression,)])", "ARGS.dataset_id, ARGS.sandbox_dataset_id, [(CovidEHRVaccineConceptSuppression,)]) for query in query_list: LOGGER.info(query) else: clean_engine.add_console_logging(ARGS.console_log)", "`{{project_id}}.{{dataset_id}}.concept`as c left join `{{project_id}}.{{dataset_id}}.concept_ancestor` as ca on c.concept_id =", "if __name__ == '__main__': import cdr_cleaner.args_parser as parser import cdr_cleaner.clean_cdr_engine", "\"\"\" raise NotImplementedError(\"Please fix me.\") def setup_validation(self, client, *args, **keyword_args):", "project_id, dataset_id, sandbox_dataset_id, table_namer=None): \"\"\" Initialize the class with proper", "select distinct c.* from `{{project_id}}.{{dataset_id}}.concept`as c left join `{{project_id}}.{{dataset_id}}.concept_relationship` on", "logging.getLogger(__name__) SUPPRESSION_RULE_CONCEPT_TABLE = 'covid_vaccine_concepts' COVID_VACCINE_CONCEPT_QUERY = JINJA_ENV.from_string(\"\"\" CREATE OR REPLACE", "if the date time values that needs to be updated", "REGEXP_CONTAINS(concept_code, r'(207)|(208)|(210)|(211)|(212)') and vocabulary_id = 'CVX' ) OR ( --", "in the tables you should be implementing the validation that", "cleaning rules that will be updating or deleting the values.", "setup validation on cleaning rules that will be updating or", "table_namer=None): \"\"\" Initialize the class with proper information. 
Set the", "and vocabulary_id = 'CVX' ) OR ( -- done by", "data from the tables Method to run validation on cleaning", "description=desc, affected_datasets=[cdr_consts.REGISTERED_TIER_DEID], affected_tables=CDM_TABLES, project_id=project_id, dataset_id=dataset_id, sandbox_dataset_id=sandbox_dataset_id, concept_suppression_lookup_table=SUPPRESSION_RULE_CONCEPT_TABLE, table_namer=table_namer) def create_suppression_lookup_table(self,", "client, *args, **keyword_args): \"\"\" Validates the cleaning rule which deletes", "should equals to initial row counts of the affected tables.", "validation on cleaning rules that will be updating or deleting", "of', 'Dose form group of', 'RxNorm - SPL') OR (relationship_id", "== '__main__': import cdr_cleaner.args_parser as parser import cdr_cleaner.clean_cdr_engine as clean_engine", "import cdr_cleaner.args_parser as parser import cdr_cleaner.clean_cdr_engine as clean_engine ARGS =", "common import JINJA_ENV, CDM_TABLES from utils import pipeline_logging # Third", "concepts_via_cr as ( select distinct c.* from `{{project_id}}.{{dataset_id}}.concept`as c left", "be implementing the validation that checks if the count of", "on cleaning rules that will be updating the values. For", "to get the initial list of values which adhere to", "DO NOT REMOVE ORIGINAL JIRA ISSUE NUMBERS! 
\"\"\" desc =", "vocabulary_id = 'CPT4' ) ), concepts_via_cr as ( select distinct", "should be implementing the logic to get the row counts", "from constants.cdr_cleaner import clean_cdr as cdr_consts from common import JINJA_ENV,", "-- done by code and vocab -- REGEXP_CONTAINS(concept_code, r'(207)|(208)|(210)|(211)|(212)') and", "be implementing the logic to get the row counts of", "client, *args, **keyword_args): \"\"\" Run required steps for validation setup", "import cdr_cleaner.clean_cdr_engine as clean_engine ARGS = parser.parse_args() pipeline_logging.configure(level=logging.DEBUG, add_console_handler=True) if", "cleaning rule \"\"\" raise NotImplementedError(\"Please fix me.\") if __name__ ==", "concept_id from covid_vacc) ) select distinct * from covid_vacc union", "affect this SQL, append them to the list of Jira", "affected datasets. As other tickets may affect this SQL, append", "by code and vocab -- REGEXP_CONTAINS(concept_code, r'(91300)|(91301)|(91302)|(91303)|(91304)') and vocabulary_id =", "your class updates all the datetime fields you should be", "example: if your class updates all the datetime fields you", "updating or deleting the values. For example: if your class", "f\"Error running job {result.job_id}: {result.errors}\") def validate_rule(self, client, *args, **keyword_args):", "to run validation on cleaning rules that will be updating", "cdr_consts from common import JINJA_ENV, CDM_TABLES from utils import pipeline_logging", "values that needs to be updated no longer exists in", "values which adhere to a condition we are looking for.", "Set the issue numbers, description and affected datasets. As other", "JIRA ISSUE NUMBERS! 
\"\"\" desc = \"Suppress COVID EHR vaccine", "time values that needs to be updated no longer exists", "result = query_job.result() if hasattr(result, 'errors') and result.errors: LOGGER.error(f\"Error running", "COVID EHR vaccine concepts.\" super().__init__( issue_numbers=['DC1692'], description=desc, affected_datasets=[cdr_consts.REGISTERED_TIER_DEID], affected_tables=CDM_TABLES, project_id=project_id,", "datetime fields you should be implementing the logic to get", "\"\"\" # Python imports import logging # Project imports from", "adhere to a condition we are looking for. if your", "Issues: DC-1692 \"\"\" # Python imports import logging # Project", "concept_suppression_lookup_table=SUPPRESSION_RULE_CONCEPT_TABLE, table_namer=table_namer) def create_suppression_lookup_table(self, client): concept_suppression_lookup_query = COVID_VACCINE_CONCEPT_QUERY.render( project_id=self.project_id, dataset_id=self.dataset_id,", "the tables Method to run validation on cleaning rules that", "the tables you should be implementing the logic to get", "\"\"\" raise NotImplementedError(\"Please fix me.\") if __name__ == '__main__': import", "should be implementing the validation that checks if the date", "utils import pipeline_logging # Third party imports from google.cloud.exceptions import", "GoogleCloudError( f\"Error running job {result.job_id}: {result.errors}\") def validate_rule(self, client, *args,", "* from covid_vacc union distinct select distinct * from concepts_via_ca", "running job {result.job_id}: {result.errors}\") raise GoogleCloudError( f\"Error running job {result.job_id}:", "query_list = clean_engine.get_query_list( ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id, [(CovidEHRVaccineConceptSuppression,)]) for query in", "the row counts of the tables prior to applying cleaning", "vocabulary_id not in ('PPI') ) OR ( -- done by", "if your class updates all the datetime fields you should", "OR ( -- done by code and vocab -- REGEXP_CONTAINS(concept_code,", 
"(select concept_id from covid_vacc) ) select distinct * from covid_vacc", "Jira Issues. DO NOT REMOVE ORIGINAL JIRA ISSUE NUMBERS! \"\"\"", "tables Method to run validation on cleaning rules that will", "'CPT4' ) ), concepts_via_cr as ( select distinct c.* from", "party imports from google.cloud.exceptions import GoogleCloudError LOGGER = logging.getLogger(__name__) SUPPRESSION_RULE_CONCEPT_TABLE", "in (select concept_id from covid_vacc) and ( relationship_id not in", "union distinct select distinct * from concepts_via_cr \"\"\") class CovidEHRVaccineConceptSuppression(AbstractBqLookupTableConceptSuppression", "ORIGINAL JIRA ISSUE NUMBERS! \"\"\" desc = \"Suppress COVID EHR", "may affect this SQL, append them to the list of", "as ca on c.concept_id = ca.descendant_concept_id where ca.ancestor_concept_id in (select", "covid_vacc as ( SELECT * FROM `{{project_id}}.{{dataset_id}}.concept` WHERE ( --", "imports from cdr_cleaner.cleaning_rules.deid.concept_suppression import AbstractBqLookupTableConceptSuppression from constants.cdr_cleaner import clean_cdr as", "tables you should be implementing the validation that checks if", "raise NotImplementedError(\"Please fix me.\") def setup_validation(self, client, *args, **keyword_args): \"\"\"", "you should be implementing the validation that checks if the", "imports import logging # Project imports from cdr_cleaner.cleaning_rules.deid.concept_suppression import AbstractBqLookupTableConceptSuppression", "in (select concept_id from covid_vacc) # and concept_id_1 not in", "): def __init__(self, project_id, dataset_id, sandbox_dataset_id, table_namer=None): \"\"\" Initialize the", "Original Issues: DC-1692 \"\"\" # Python imports import logging #", "the validation that checks if the date time values that", "SUPPRESSION_RULE_CONCEPT_TABLE = 'covid_vaccine_concepts' COVID_VACCINE_CONCEPT_QUERY = JINJA_ENV.from_string(\"\"\" CREATE OR REPLACE TABLE", "from covid_vacc) ) select distinct * from covid_vacc union distinct", "parser 
import cdr_cleaner.clean_cdr_engine as clean_engine ARGS = parser.parse_args() pipeline_logging.configure(level=logging.DEBUG, add_console_handler=True)", "ARGS.project_id, ARGS.dataset_id, ARGS.sandbox_dataset_id, [(CovidEHRVaccineConceptSuppression,)]) for query in query_list: LOGGER.info(query) else:", "union distinct select distinct * from concepts_via_ca union distinct select", "this SQL, append them to the list of Jira Issues.", "NOT REMOVE ORIGINAL JIRA ISSUE NUMBERS! \"\"\" desc = \"Suppress", "fix me.\") def setup_validation(self, client, *args, **keyword_args): \"\"\" Run required", "tables prior to applying cleaning rule \"\"\" raise NotImplementedError(\"Please fix", "parser.parse_args() pipeline_logging.configure(level=logging.DEBUG, add_console_handler=True) if ARGS.list_queries: clean_engine.add_console_logging() query_list = clean_engine.get_query_list( ARGS.project_id,", "on cleaning rules that will be updating or deleting the", "be updating or deleting the values. For example: if your", "as ( select c.* from `{{project_id}}.{{dataset_id}}.concept`as c left join `{{project_id}}.{{dataset_id}}.concept_ancestor`", "sandbox_dataset_id=sandbox_dataset_id, concept_suppression_lookup_table=SUPPRESSION_RULE_CONCEPT_TABLE, table_namer=table_namer) def create_suppression_lookup_table(self, client): concept_suppression_lookup_query = COVID_VACCINE_CONCEPT_QUERY.render( project_id=self.project_id,", "to applying cleaning rule \"\"\" raise NotImplementedError(\"Please fix me.\") if", "deleting the values. 
For example: if your class updates all", "result.errors: LOGGER.error(f\"Error running job {result.job_id}: {result.errors}\") raise GoogleCloudError( f\"Error running", "group of', 'RxNorm - SPL') OR (relationship_id = 'RxNorm -", "( select distinct c.* from `{{project_id}}.{{dataset_id}}.concept`as c left join `{{project_id}}.{{dataset_id}}.concept_relationship`", "in (select concept_id from covid_vacc) ) select distinct * from", "from covid_vacc) # and concept_id_1 not in (select concept_id from", "= \"Suppress COVID EHR vaccine concepts.\" super().__init__( issue_numbers=['DC1692'], description=desc, affected_datasets=[cdr_consts.REGISTERED_TIER_DEID],", "REGEXP_CONTAINS(concept_name, r'(?i)(COVID)') AND REGEXP_CONTAINS(concept_name, r'(?i)(VAC)') AND vocabulary_id not in ('PPI')" ]
[ "requests.post(query_url, data=data, headers=headers) response.raise_for_status() return response.json(), None except JSONDecodeError as", "code indicates something went wrong try: return response.json(), str(response.status_code) except", "send_upload(query_url: str, data: Dict[str, Any], db_bytes: io.BufferedReader) -> Tuple[List[Any], str]:", "return None, str(response.status_code) return response.json(), None except requests.exceptions.HTTPError as e:", "!= 201: # The returned status code indicates something went", "to DBHub.io, formatting the returned result as JSON Parameters ----------", "a request to DBHub.io. Parameters ---- query_url : str url", "None except requests.exceptions.HTTPError as e: return None, e.args[0] except requests.exceptions.RequestException", "def send_upload(query_url: str, data: Dict[str, Any], db_bytes: io.BufferedReader) -> Tuple[List[Any],", "data : Dict[str, Any] data to be processed to the", "wrong try: return response.json(), str(response.status_code) except JSONDecodeError: return None, str(response.status_code)", "f'pydbhub v{pydbhub.__version__}'} response = requests.post(query_url, data=data, headers=headers) response.raise_for_status() return response.json(),", "str, data: Dict[str, Any], db_bytes: io.BufferedReader) -> Tuple[List[Any], str]: \"\"\"", "The returned status code indicates something went wrong try: return", "e.args(0) return None, str(cause.args[0]) def send_upload(query_url: str, data: Dict[str, Any],", "db_bytes : io.BufferedReader A buffered binary stream of the database", "list of JSON object. 
- a string describe error if", "Dict[str, Any] data to be processed to the server.------ Returns", "JSONDecodeError import requests import io def send_request_json(query_url: str, data: Dict[str,", "file is returned as a list of bytes \"\"\" try:", "headers=headers) response.raise_for_status() return response.content, None except requests.exceptions.HTTPError as e: return", "endpoint data : Dict[str, Any] data to be processed to", "server.------ Returns ------- List[bytes] database file is returned as a", "JSONDecodeError: return None, str(response.status_code) return response.json(), None except requests.exceptions.HTTPError as", ": Dict[str, Any] data to be processed to the server.", "io.BufferedReader A buffered binary stream of the database file. Returns", "returned as a list of bytes \"\"\" try: headers =", "# The returned status code indicates something went wrong try:", "binary stream of the database file. Returns ------- Tuple[List[Any], str]", "\"\"\" send_request sends a request to DBHub.io. Parameters ---- query_url", "------- List[bytes] database file is returned as a list of", "Tuple[List[bytes], str]: \"\"\" send_request sends a request to DBHub.io. Parameters", "be processed to the server. 
db_bytes : io.BufferedReader A buffered", "return None, e.args[0] except TypeError as e: return None, e.args[0]", "send_request(query_url: str, data: Dict[str, Any]) -> Tuple[List[bytes], str]: \"\"\" send_request", "response = requests.post(query_url, data=data, headers=headers) response.raise_for_status() return response.json(), None except", "Any], db_bytes: io.BufferedReader) -> Tuple[List[Any], str]: \"\"\" send_upload uploads a", "cause = e.args(0) return None, str(cause.args[0]) def send_request(query_url: str, data:", "db_bytes} response = requests.post(query_url, data=data, headers=headers, files=files) response.raise_for_status() if response.status_code", "headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'} response = requests.post(query_url, data=data, headers=headers)", "response.json(), None except requests.exceptions.HTTPError as e: try: return response.json(), e.args[0]", "stream of the database file. Returns ------- Tuple[List[Any], str] The", "error if occurs \"\"\" try: headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'}", "= requests.post(query_url, data=data, headers=headers) response.raise_for_status() return response.json(), None except JSONDecodeError", "201: # The returned status code indicates something went wrong", "= requests.post(query_url, data=data, headers=headers) response.raise_for_status() return response.content, None except requests.exceptions.HTTPError", "return None, str(cause.args[0]) def send_request(query_url: str, data: Dict[str, Any]) ->", "the server.------ Returns ------- List[bytes] database file is returned as", "data is - a list of JSON object. 
- a", "-> Tuple[List[Any], str]: \"\"\" send_upload uploads a database to DBHub.io.", "str, data: Dict[str, Any]) -> Tuple[List[Any], str]: \"\"\" send_request_json sends", "= requests.post(query_url, data=data, headers=headers, files=files) response.raise_for_status() if response.status_code != 201:", "return response.json(), e.args[0] except JSONDecodeError: return None, e.args[0] except requests.exceptions.RequestException", "bytes \"\"\" try: headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'} response =", "the database file. Returns ------- Tuple[List[Any], str] The returned data", "typing import Any, Dict, List, Tuple from json.decoder import JSONDecodeError", "e: return None, e.args[0] except requests.exceptions.HTTPError as e: try: return", "= {\"file\": db_bytes} response = requests.post(query_url, data=data, headers=headers, files=files) response.raise_for_status()", "e: return None, e.args[0] except TypeError as e: return None,", "to the server.------ Returns ------- List[bytes] database file is returned", "is returned as a list of bytes \"\"\" try: headers", "a list of JSON object. - a string describe error", "object. - a string describe error if occurs \"\"\" try:", "= {'User-Agent': f'pydbhub v{pydbhub.__version__}'} response = requests.post(query_url, data=data, headers=headers) response.raise_for_status()", "requests.exceptions.HTTPError as e: try: return response.json(), e.args[0] except JSONDecodeError: return", "API endpoint. data : Dict[str, Any] data to be processed", "requests.post(query_url, data=data, headers=headers, files=files) response.raise_for_status() if response.status_code != 201: #", "Tuple[List[Any], str] The returned data is - a list of", "try: headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'} response = requests.post(query_url, data=data,", "A buffered binary stream of the database file. 
Returns -------", "\"\"\" try: headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'} files = {\"file\":", "v{pydbhub.__version__}'} files = {\"file\": db_bytes} response = requests.post(query_url, data=data, headers=headers,", "response.json(), None except JSONDecodeError as e: return None, e.args[0] except", "something went wrong try: return response.json(), str(response.status_code) except JSONDecodeError: return", "None, e.args[0] except TypeError as e: return None, e.args[0] except", "None except JSONDecodeError as e: return None, e.args[0] except TypeError", "None, e.args[0] except requests.exceptions.RequestException as e: cause = e.args(0) return", "---- query_url : str url of the API endpoint data", "as JSON Parameters ---------- query_url : str url of the", "import Any, Dict, List, Tuple from json.decoder import JSONDecodeError import", "str] The returned data is - a list of JSON", "return None, e.args[0] except requests.exceptions.RequestException as e: cause = e.args(0)", "- a list of JSON object. - a string describe", "db_bytes: io.BufferedReader) -> Tuple[List[Any], str]: \"\"\" send_upload uploads a database", "database to DBHub.io. Parameters ---------- query_url : str url of", "the server. Returns ------- Tuple[List[Any], str] The returned data is", "the API endpoint. data : Dict[str, Any] data to be", "---------- query_url : str url of the API endpoint data", "response = requests.post(query_url, data=data, headers=headers) response.raise_for_status() return response.content, None except", "response.json(), str(response.status_code) except JSONDecodeError: return None, str(response.status_code) return response.json(), None", "{'User-Agent': f'pydbhub v{pydbhub.__version__}'} response = requests.post(query_url, data=data, headers=headers) response.raise_for_status() return", "the returned result as JSON Parameters ---------- query_url : str", "returned data is - a list of JSON object. 
-", "returned status code indicates something went wrong try: return response.json(),", "try: return response.json(), str(response.status_code) except JSONDecodeError: return None, str(response.status_code) return", "from json.decoder import JSONDecodeError import requests import io def send_request_json(query_url:", "indicates something went wrong try: return response.json(), str(response.status_code) except JSONDecodeError:", "of bytes \"\"\" try: headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'} response", "str url of the API endpoint data : Dict[str, Any]", "-> Tuple[List[Any], str]: \"\"\" send_request_json sends a request to DBHub.io,", "send_request_json(query_url: str, data: Dict[str, Any]) -> Tuple[List[Any], str]: \"\"\" send_request_json", "requests.exceptions.RequestException as e: cause = e.args(0) return None, str(cause.args[0]) def", "e: cause = e.args(0) return None, str(cause.args[0]) def send_request(query_url: str,", "data=data, headers=headers, files=files) response.raise_for_status() if response.status_code != 201: # The", "as e: return None, e.args[0] except requests.exceptions.RequestException as e: cause", "str]: \"\"\" send_request sends a request to DBHub.io. Parameters ----", "to DBHub.io. Parameters ---------- query_url : str url of the", "file. Returns ------- Tuple[List[Any], str] The returned data is -", "str(cause.args[0]) def send_request(query_url: str, data: Dict[str, Any]) -> Tuple[List[bytes], str]:", "def send_request(query_url: str, data: Dict[str, Any]) -> Tuple[List[bytes], str]: \"\"\"", "Dict[str, Any], db_bytes: io.BufferedReader) -> Tuple[List[Any], str]: \"\"\" send_upload uploads", "to be processed to the server. Returns ------- Tuple[List[Any], str]", "as e: cause = e.args(0) return None, str(cause.args[0]) def send_upload(query_url:", "if occurs \"\"\" try: headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'} files", "The returned data is - a list of JSON object.", "to the server. 
Returns ------- Tuple[List[Any], str] The returned data", "as e: cause = e.args(0) return None, str(cause.args[0]) def send_request(query_url:", "headers=headers, files=files) response.raise_for_status() if response.status_code != 201: # The returned", "Dict[str, Any]) -> Tuple[List[Any], str]: \"\"\" send_request_json sends a request", "a database to DBHub.io. Parameters ---------- query_url : str url", "DBHub.io. Parameters ---------- query_url : str url of the API", "database file is returned as a list of bytes \"\"\"", "as a list of bytes \"\"\" try: headers = {'User-Agent':", "str(cause.args[0]) def send_upload(query_url: str, data: Dict[str, Any], db_bytes: io.BufferedReader) ->", "io def send_request_json(query_url: str, data: Dict[str, Any]) -> Tuple[List[Any], str]:", "None except requests.exceptions.HTTPError as e: try: return response.json(), e.args[0] except", "as e: try: return response.json(), e.args[0] except JSONDecodeError: return None,", "io.BufferedReader) -> Tuple[List[Any], str]: \"\"\" send_upload uploads a database to", "List, Tuple from json.decoder import JSONDecodeError import requests import io", "JSON object. - a string describe error if occurs \"\"\"", "response.raise_for_status() return response.json(), None except JSONDecodeError as e: return None,", "None, str(cause.args[0]) def send_request(query_url: str, data: Dict[str, Any]) -> Tuple[List[bytes],", "e.args(0) return None, str(cause.args[0]) def send_request(query_url: str, data: Dict[str, Any])", ": io.BufferedReader A buffered binary stream of the database file.", "database file. Returns ------- Tuple[List[Any], str] The returned data is", "JSON Parameters ---------- query_url : str url of the API", "query_url : str url of the API endpoint data :", "be processed to the server. 
Returns ------- Tuple[List[Any], str] The", "return response.content, None except requests.exceptions.HTTPError as e: return None, e.args[0]", "response.content, None except requests.exceptions.HTTPError as e: return None, e.args[0] except", "def send_request_json(query_url: str, data: Dict[str, Any]) -> Tuple[List[Any], str]: \"\"\"", "= e.args(0) return None, str(cause.args[0]) def send_upload(query_url: str, data: Dict[str,", "uploads a database to DBHub.io. Parameters ---------- query_url : str", "return response.json(), None except requests.exceptions.HTTPError as e: try: return response.json(),", "returned result as JSON Parameters ---------- query_url : str url", ": str url of the API endpoint data : Dict[str,", "request to DBHub.io. Parameters ---- query_url : str url of", "\"\"\" try: headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'} response = requests.post(query_url,", "requests.exceptions.HTTPError as e: return None, e.args[0] except requests.exceptions.RequestException as e:", "data: Dict[str, Any]) -> Tuple[List[Any], str]: \"\"\" send_request_json sends a", ": Dict[str, Any] data to be processed to the server.------", "except requests.exceptions.RequestException as e: cause = e.args(0) return None, str(cause.args[0])", "return None, e.args[0] except requests.exceptions.HTTPError as e: try: return response.json(),", "a string describe error if occurs \"\"\" try: headers =", "processed to the server.------ Returns ------- List[bytes] database file is", "JSONDecodeError: return None, e.args[0] except requests.exceptions.RequestException as e: cause =", "data: Dict[str, Any]) -> Tuple[List[bytes], str]: \"\"\" send_request sends a", "except JSONDecodeError as e: return None, e.args[0] except TypeError as", "DBHub.io, formatting the returned result as JSON Parameters ---------- query_url", "be processed to the server.------ Returns ------- List[bytes] database file", "\"\"\" send_upload uploads a database to DBHub.io. 
Parameters ---------- query_url", "if response.status_code != 201: # The returned status code indicates", "response.raise_for_status() if response.status_code != 201: # The returned status code", "API endpoint data : Dict[str, Any] data to be processed", "Parameters ---------- query_url : str url of the API endpoint.", "pydbhub from typing import Any, Dict, List, Tuple from json.decoder", "data to be processed to the server. Returns ------- Tuple[List[Any],", "f'pydbhub v{pydbhub.__version__}'} files = {\"file\": db_bytes} response = requests.post(query_url, data=data,", "e.args[0] except requests.exceptions.HTTPError as e: try: return response.json(), e.args[0] except", "a request to DBHub.io, formatting the returned result as JSON", "string describe error if occurs \"\"\" try: headers = {'User-Agent':", "processed to the server. Returns ------- Tuple[List[Any], str] The returned", "DBHub.io. Parameters ---- query_url : str url of the API", "Any] data to be processed to the server. db_bytes :", "requests.post(query_url, data=data, headers=headers) response.raise_for_status() return response.content, None except requests.exceptions.HTTPError as", "went wrong try: return response.json(), str(response.status_code) except JSONDecodeError: return None,", "try: headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'} files = {\"file\": db_bytes}", "endpoint. data : Dict[str, Any] data to be processed to", "sends a request to DBHub.io. Parameters ---- query_url : str", "server. 
db_bytes : io.BufferedReader A buffered binary stream of the", "Dict[str, Any]) -> Tuple[List[bytes], str]: \"\"\" send_request sends a request", "Dict, List, Tuple from json.decoder import JSONDecodeError import requests import", "except JSONDecodeError: return None, str(response.status_code) return response.json(), None except requests.exceptions.HTTPError", "e: cause = e.args(0) return None, str(cause.args[0]) def send_upload(query_url: str,", "str, data: Dict[str, Any]) -> Tuple[List[bytes], str]: \"\"\" send_request sends", "the API endpoint data : Dict[str, Any] data to be", "except requests.exceptions.HTTPError as e: try: return response.json(), e.args[0] except JSONDecodeError:", "= e.args(0) return None, str(cause.args[0]) def send_request(query_url: str, data: Dict[str,", "the server. db_bytes : io.BufferedReader A buffered binary stream of", "str]: \"\"\" send_request_json sends a request to DBHub.io, formatting the", "to be processed to the server.------ Returns ------- List[bytes] database", "response.raise_for_status() return response.content, None except requests.exceptions.HTTPError as e: return None,", "Any] data to be processed to the server. Returns -------", "formatting the returned result as JSON Parameters ---------- query_url :", "None, e.args[0] except requests.exceptions.HTTPError as e: try: return response.json(), e.args[0]", "Any] data to be processed to the server.------ Returns -------", "return response.json(), str(response.status_code) except JSONDecodeError: return None, str(response.status_code) return response.json(),", "status code indicates something went wrong try: return response.json(), str(response.status_code)", "from typing import Any, Dict, List, Tuple from json.decoder import", "url of the API endpoint. 
data : Dict[str, Any] data", "except TypeError as e: return None, e.args[0] except requests.exceptions.HTTPError as", "e: return None, e.args[0] except requests.exceptions.RequestException as e: cause =", "import pydbhub from typing import Any, Dict, List, Tuple from", "request to DBHub.io, formatting the returned result as JSON Parameters", "except JSONDecodeError: return None, e.args[0] except requests.exceptions.RequestException as e: cause", "import JSONDecodeError import requests import io def send_request_json(query_url: str, data:", "of JSON object. - a string describe error if occurs", "data to be processed to the server.------ Returns ------- List[bytes]", "processed to the server. db_bytes : io.BufferedReader A buffered binary", "sends a request to DBHub.io, formatting the returned result as", "result as JSON Parameters ---------- query_url : str url of", "of the database file. Returns ------- Tuple[List[Any], str] The returned", "str(response.status_code) return response.json(), None except requests.exceptions.HTTPError as e: try: return", "Tuple from json.decoder import JSONDecodeError import requests import io def", "send_upload uploads a database to DBHub.io. Parameters ---------- query_url :", "describe error if occurs \"\"\" try: headers = {'User-Agent': f'pydbhub", "e.args[0] except requests.exceptions.RequestException as e: cause = e.args(0) return None,", "{'User-Agent': f'pydbhub v{pydbhub.__version__}'} files = {\"file\": db_bytes} response = requests.post(query_url,", "Any, Dict, List, Tuple from json.decoder import JSONDecodeError import requests", "List[bytes] database file is returned as a list of bytes", "response = requests.post(query_url, data=data, headers=headers, files=files) response.raise_for_status() if response.status_code !=", "return None, str(cause.args[0]) def send_upload(query_url: str, data: Dict[str, Any], db_bytes:", "server. 
Returns ------- Tuple[List[Any], str] The returned data is -", "import requests import io def send_request_json(query_url: str, data: Dict[str, Any])", "return response.json(), None except JSONDecodeError as e: return None, e.args[0]", "cause = e.args(0) return None, str(cause.args[0]) def send_upload(query_url: str, data:", "Parameters ---------- query_url : str url of the API endpoint", "files=files) response.raise_for_status() if response.status_code != 201: # The returned status", "e.args[0] except JSONDecodeError: return None, e.args[0] except requests.exceptions.RequestException as e:", "None, str(cause.args[0]) def send_upload(query_url: str, data: Dict[str, Any], db_bytes: io.BufferedReader)", "Returns ------- List[bytes] database file is returned as a list", "headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'} files = {\"file\": db_bytes} response", "None, str(response.status_code) return response.json(), None except requests.exceptions.HTTPError as e: try:", "e.args[0] except TypeError as e: return None, e.args[0] except requests.exceptions.HTTPError", "Any]) -> Tuple[List[bytes], str]: \"\"\" send_request sends a request to", "Parameters ---- query_url : str url of the API endpoint", "import io def send_request_json(query_url: str, data: Dict[str, Any]) -> Tuple[List[Any],", "to DBHub.io. Parameters ---- query_url : str url of the", "data: Dict[str, Any], db_bytes: io.BufferedReader) -> Tuple[List[Any], str]: \"\"\" send_upload", "query_url : str url of the API endpoint. data :", "files = {\"file\": db_bytes} response = requests.post(query_url, data=data, headers=headers, files=files)", "response.status_code != 201: # The returned status code indicates something", "str(response.status_code) except JSONDecodeError: return None, str(response.status_code) return response.json(), None except", "as e: return None, e.args[0] except TypeError as e: return", "Tuple[List[Any], str]: \"\"\" send_upload uploads a database to DBHub.io. 
Parameters", "v{pydbhub.__version__}'} response = requests.post(query_url, data=data, headers=headers) response.raise_for_status() return response.content, None", "Tuple[List[Any], str]: \"\"\" send_request_json sends a request to DBHub.io, formatting", "a list of bytes \"\"\" try: headers = {'User-Agent': f'pydbhub", "of the API endpoint data : Dict[str, Any] data to", "Returns ------- Tuple[List[Any], str] The returned data is - a", "url of the API endpoint data : Dict[str, Any] data", "v{pydbhub.__version__}'} response = requests.post(query_url, data=data, headers=headers) response.raise_for_status() return response.json(), None", "as e: return None, e.args[0] except requests.exceptions.HTTPError as e: try:", ": str url of the API endpoint. data : Dict[str,", "TypeError as e: return None, e.args[0] except requests.exceptions.HTTPError as e:", "e: try: return response.json(), e.args[0] except JSONDecodeError: return None, e.args[0]", "except requests.exceptions.HTTPError as e: return None, e.args[0] except requests.exceptions.RequestException as", "send_request_json sends a request to DBHub.io, formatting the returned result", "send_request sends a request to DBHub.io. Parameters ---- query_url :", "str url of the API endpoint. data : Dict[str, Any]", "\"\"\" send_request_json sends a request to DBHub.io, formatting the returned", "JSONDecodeError as e: return None, e.args[0] except TypeError as e:", "f'pydbhub v{pydbhub.__version__}'} response = requests.post(query_url, data=data, headers=headers) response.raise_for_status() return response.content,", "str]: \"\"\" send_upload uploads a database to DBHub.io. Parameters ----------", "Dict[str, Any] data to be processed to the server. Returns", "to be processed to the server. 
db_bytes : io.BufferedReader A", "------- Tuple[List[Any], str] The returned data is - a list", "json.decoder import JSONDecodeError import requests import io def send_request_json(query_url: str,", "occurs \"\"\" try: headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'} files =", "to the server. db_bytes : io.BufferedReader A buffered binary stream", "Dict[str, Any] data to be processed to the server. db_bytes", "list of bytes \"\"\" try: headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'}", "try: return response.json(), e.args[0] except JSONDecodeError: return None, e.args[0] except", "buffered binary stream of the database file. Returns ------- Tuple[List[Any],", "is - a list of JSON object. - a string", "response.json(), e.args[0] except JSONDecodeError: return None, e.args[0] except requests.exceptions.RequestException as", "occurs \"\"\" try: headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'} response =", "requests import io def send_request_json(query_url: str, data: Dict[str, Any]) ->", "-> Tuple[List[bytes], str]: \"\"\" send_request sends a request to DBHub.io.", "data=data, headers=headers) response.raise_for_status() return response.content, None except requests.exceptions.HTTPError as e:", "of the API endpoint. data : Dict[str, Any] data to", "data=data, headers=headers) response.raise_for_status() return response.json(), None except JSONDecodeError as e:", "data to be processed to the server. 
db_bytes : io.BufferedReader", "{\"file\": db_bytes} response = requests.post(query_url, data=data, headers=headers, files=files) response.raise_for_status() if", "= {'User-Agent': f'pydbhub v{pydbhub.__version__}'} files = {\"file\": db_bytes} response =", "if occurs \"\"\" try: headers = {'User-Agent': f'pydbhub v{pydbhub.__version__}'} response", "Any]) -> Tuple[List[Any], str]: \"\"\" send_request_json sends a request to", "headers=headers) response.raise_for_status() return response.json(), None except JSONDecodeError as e: return", "---------- query_url : str url of the API endpoint. data", "- a string describe error if occurs \"\"\" try: headers" ]
[ "have a name & a seed # seed doesn't change,", "# teams will have a name & a seed #", "the next round # - so picking 32, then 16,", "- so picking 32, then 16, then 8, 4, 2,", "16, then 8, 4, 2, 1...i.e. round 1-6 winners #", "pytest.raises(ValueError, match=r\".*invalid winner\"): round_score(VALID_ROUND, all_teams, round_winners, picked_winners) # score =", "all_teams, round_winners, picked_winners) # score = round_score(0) # assert score", "name & a seed # seed doesn't change, so maybe", "so maybe make that not passed around w/ results def", "test_round_score_invalid_winner(): VALID_ROUND = 1 all_teams = [] round_winners = []", "round_score(VALID_ROUND, all_teams, round_winners, picked_winners) # score = round_score(0) # assert", "w/ results def test_round_score_invalid_round(): with pytest.raises(ValueError, match=r\".*range*\"): round_score(0) with pytest.raises(ValueError,", "2, 1...i.e. round 1-6 winners # teams will have a", "test_round_score_invalid_round(): with pytest.raises(ValueError, match=r\".*range*\"): round_score(0) with pytest.raises(ValueError, match=r\".*range*\"): round_score(7) def", "match=r\".*range*\"): round_score(7) def test_round_score_invalid_winner(): VALID_ROUND = 1 all_teams = []", "make that not passed around w/ results def test_round_score_invalid_round(): with", "passed around w/ results def test_round_score_invalid_round(): with pytest.raises(ValueError, match=r\".*range*\"): round_score(0)", "next round # - so picking 32, then 16, then", "4, 2, 1...i.e. 
round 1-6 winners # teams will have", "results def test_round_score_invalid_round(): with pytest.raises(ValueError, match=r\".*range*\"): round_score(0) with pytest.raises(ValueError, match=r\".*range*\"):", "it to the next round # - so picking 32,", "from calcscore import round_score # you'll be picking what teams", "what teams make it to the next round # -", "# - so picking 32, then 16, then 8, 4,", "be picking what teams make it to the next round", "round 1-6 winners # teams will have a name &", "= [] picked_winners = [\"picked team\"] with pytest.raises(ValueError, match=r\".*invalid winner\"):", "calcscore import round_score # you'll be picking what teams make", "then 8, 4, 2, 1...i.e. round 1-6 winners # teams", "doesn't change, so maybe make that not passed around w/", "round_score # you'll be picking what teams make it to", "a name & a seed # seed doesn't change, so", "= [] round_winners = [] picked_winners = [\"picked team\"] with", "[\"picked team\"] with pytest.raises(ValueError, match=r\".*invalid winner\"): round_score(VALID_ROUND, all_teams, round_winners, picked_winners)", "make it to the next round # - so picking", "& a seed # seed doesn't change, so maybe make", "that not passed around w/ results def test_round_score_invalid_round(): with pytest.raises(ValueError,", "seed # seed doesn't change, so maybe make that not", "= 1 all_teams = [] round_winners = [] picked_winners =", "1 all_teams = [] round_winners = [] picked_winners = [\"picked", "change, so maybe make that not passed around w/ results", "round # - so picking 32, then 16, then 8,", "8, 4, 2, 1...i.e. 
round 1-6 winners # teams will", "with pytest.raises(ValueError, match=r\".*invalid winner\"): round_score(VALID_ROUND, all_teams, round_winners, picked_winners) # score", "winner\"): round_score(VALID_ROUND, all_teams, round_winners, picked_winners) # score = round_score(0) #", "with pytest.raises(ValueError, match=r\".*range*\"): round_score(0) with pytest.raises(ValueError, match=r\".*range*\"): round_score(7) def test_round_score_invalid_winner():", "around w/ results def test_round_score_invalid_round(): with pytest.raises(ValueError, match=r\".*range*\"): round_score(0) with", "round_winners = [] picked_winners = [\"picked team\"] with pytest.raises(ValueError, match=r\".*invalid", "match=r\".*range*\"): round_score(0) with pytest.raises(ValueError, match=r\".*range*\"): round_score(7) def test_round_score_invalid_winner(): VALID_ROUND =", "team\"] with pytest.raises(ValueError, match=r\".*invalid winner\"): round_score(VALID_ROUND, all_teams, round_winners, picked_winners) #", "winners # teams will have a name & a seed", "<reponame>BrandonLeiran/bracket-scoring import pytest from calcscore import round_score # you'll be", "[] picked_winners = [\"picked team\"] with pytest.raises(ValueError, match=r\".*invalid winner\"): round_score(VALID_ROUND,", "you'll be picking what teams make it to the next", "def test_round_score_invalid_winner(): VALID_ROUND = 1 all_teams = [] round_winners =", "picking what teams make it to the next round #", "maybe make that not passed around w/ results def test_round_score_invalid_round():", "not passed around w/ results def test_round_score_invalid_round(): with pytest.raises(ValueError, match=r\".*range*\"):", "a seed # seed doesn't change, so maybe make that", "round_score(7) def test_round_score_invalid_winner(): VALID_ROUND = 1 all_teams = [] round_winners", "import pytest from calcscore import round_score # you'll be picking", "picked_winners) # score = round_score(0) # assert score == 0", "picking 32, then 16, then 8, 4, 2, 
1...i.e. round", "1...i.e. round 1-6 winners # teams will have a name", "def test_round_score_invalid_round(): with pytest.raises(ValueError, match=r\".*range*\"): round_score(0) with pytest.raises(ValueError, match=r\".*range*\"): round_score(7)", "all_teams = [] round_winners = [] picked_winners = [\"picked team\"]", "pytest.raises(ValueError, match=r\".*range*\"): round_score(0) with pytest.raises(ValueError, match=r\".*range*\"): round_score(7) def test_round_score_invalid_winner(): VALID_ROUND", "with pytest.raises(ValueError, match=r\".*range*\"): round_score(7) def test_round_score_invalid_winner(): VALID_ROUND = 1 all_teams", "match=r\".*invalid winner\"): round_score(VALID_ROUND, all_teams, round_winners, picked_winners) # score = round_score(0)", "then 16, then 8, 4, 2, 1...i.e. round 1-6 winners", "import round_score # you'll be picking what teams make it", "seed doesn't change, so maybe make that not passed around", "1-6 winners # teams will have a name & a", "teams will have a name & a seed # seed", "= [\"picked team\"] with pytest.raises(ValueError, match=r\".*invalid winner\"): round_score(VALID_ROUND, all_teams, round_winners,", "# you'll be picking what teams make it to the", "picked_winners = [\"picked team\"] with pytest.raises(ValueError, match=r\".*invalid winner\"): round_score(VALID_ROUND, all_teams,", "so picking 32, then 16, then 8, 4, 2, 1...i.e.", "pytest from calcscore import round_score # you'll be picking what", "pytest.raises(ValueError, match=r\".*range*\"): round_score(7) def test_round_score_invalid_winner(): VALID_ROUND = 1 all_teams =", "will have a name & a seed # seed doesn't", "# seed doesn't change, so maybe make that not passed", "round_score(0) with pytest.raises(ValueError, match=r\".*range*\"): round_score(7) def test_round_score_invalid_winner(): VALID_ROUND = 1", "round_winners, picked_winners) # score = round_score(0) # assert score ==", "to the next round # - so picking 32, then", "[] round_winners = [] picked_winners 
= [\"picked team\"] with pytest.raises(ValueError,", "32, then 16, then 8, 4, 2, 1...i.e. round 1-6", "VALID_ROUND = 1 all_teams = [] round_winners = [] picked_winners", "teams make it to the next round # - so" ]
[ "'21142041'), ('pubmed', '21517057'), ('pubmed', '22229781'), ('pubmed', '15074950'), }, {(xref.prefix, xref.identifier)", "= list(iterate_node_relationships(data, 'chebi')) self.assertEqual(1, len(relations)) typedef, target = relations[0] self.assertIsNotNone(target)", "typedef.identifier) for typedef in iterate_graph_typedefs(self.graph) } self.assertIn(('chebi', 'has_part'), pairs) def", "test_get_node_properties(self): \"\"\"Test getting properties from a node in a :mod:`obonet`", "an :mod:`obonet` graph.\"\"\" pairs = { (typedef.prefix, typedef.identifier) for typedef", "test_get_node_parents(self): \"\"\"Test getting parents from a node in a :mod:`obonet`", "= self.graph.nodes['CHEBI:17051'] relations = list(iterate_node_relationships(data, 'chebi')) self.assertEqual(1, len(relations)) typedef, target", "xref in xrefs }) self.assertEqual( { ('reaxys', '3570522'), ('beilstein', '3570522'),", "iterate_node_relationships, iterate_node_synonyms, iterate_node_xrefs, ) from tests.constants import TEST_CHEBI_OBO_PATH class TestParseObonet(unittest.TestCase):", "iterate_node_synonyms, iterate_node_xrefs, ) from tests.constants import TEST_CHEBI_OBO_PATH class TestParseObonet(unittest.TestCase): \"\"\"\"\"\"", "'15074950'), }, {(xref.prefix, xref.identifier) for xref in xrefs} ) def", "'cas', 'beilstein', 'reaxys'}, { xref.prefix for xref in xrefs })", "{ parent.identifier for parent in parents }) self.assertEqual({'chebi'}, { parent.prefix", "SynonymTypeDef, get from pyobo.struct import Reference from pyobo.struct.struct import (", "in xrefs} ) def test_get_node_relations(self): \"\"\"Test getting relations from a", "Reference from pyobo.struct.struct import ( iterate_graph_synonym_typedefs, iterate_graph_typedefs, iterate_node_parents, iterate_node_properties, iterate_node_relationships,", "iterate_node_xrefs, ) from tests.constants import TEST_CHEBI_OBO_PATH class TestParseObonet(unittest.TestCase): \"\"\"\"\"\" @classmethod", "= relations[0] 
self.assertIsNotNone(target) self.assertIsInstance(target, Reference) self.assertEqual('chebi', target.prefix) self.assertEqual('29228', target.identifier) self.assertIsNotNone(typedef)", "failed') # TODO implement # self.assertEqual(SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), synonym.type) def", "a node in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:17051'] relations", "= list(iterate_node_xrefs(data)) self.assertEqual(7, len(xrefs)) # NOTE the prefixes are remapped", "Reference) self.assertEqual('chebi', typedef.prefix) self.assertEqual('is_conjugate_base_of', typedef.identifier) class TestGet(unittest.TestCase): \"\"\"Test generation of", "a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] xrefs = list(iterate_node_xrefs(data)) self.assertEqual(7,", "from tests.constants import TEST_CHEBI_OBO_PATH class TestParseObonet(unittest.TestCase): \"\"\"\"\"\" @classmethod def setUpClass(cls)", "generation of OBO objects.\"\"\" def test_get_obo(self): \"\"\"Test getting an OBO", "value = [value for prop, value in properties if prop", "graph.\"\"\" data = self.graph.nodes['CHEBI:17051'] relations = list(iterate_node_relationships(data, 'chebi')) self.assertEqual(1, len(relations))", "len(synonyms)) synonym = synonyms[0] self.assertEqual('N,N,N-tributylbutan-1-aminium fluoride', synonym.name, msg='name parsing failed')", "setUpClass(cls) -> None: cls.graph = obonet.read_obo(TEST_CHEBI_OBO_PATH) def test_get_graph_typedefs(self): \"\"\"Test getting", "sum(prop == t_prop for prop, value in properties)) value =", "def test_get_node_relations(self): \"\"\"Test getting relations from a node in a", "in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] properties = list(iterate_node_properties(data))", "key=attrgetter('id')) self.assertEqual( sorted([ SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), SynonymTypeDef(id='BRAND_NAME', name='BRAND NAME'), SynonymTypeDef(id='INN',", "self.assertEqual('EXACT', 
synonym.specificity, msg='specificity parsing failed') # TODO implement # self.assertEqual(SynonymTypeDef(id='IUPAC_NAME',", "'beilstein', 'reaxys'}, { xref.prefix for xref in xrefs }) self.assertEqual(", "definitions from an :mod:`obonet` graph.\"\"\" synonym_typedefs = sorted(iterate_graph_synonym_typedefs(self.graph), key=attrgetter('id')) self.assertEqual(", "from pyobo.struct import Reference from pyobo.struct.struct import ( iterate_graph_synonym_typedefs, iterate_graph_typedefs,", "(typedef.prefix, typedef.identifier) for typedef in iterate_graph_typedefs(self.graph) } self.assertIn(('chebi', 'has_part'), pairs)", "PyOBO self.assertEqual({'pubmed', 'cas', 'beilstein', 'reaxys'}, { xref.prefix for xref in", "xref.prefix for xref in xrefs }) self.assertEqual( { ('reaxys', '3570522'),", "iterate_graph_synonym_typedefs, iterate_graph_typedefs, iterate_node_parents, iterate_node_properties, iterate_node_relationships, iterate_node_synonyms, iterate_node_xrefs, ) from tests.constants", "class TestParseObonet(unittest.TestCase): \"\"\"\"\"\" @classmethod def setUpClass(cls) -> None: cls.graph =", "\"\"\"Test generation of OBO objects.\"\"\" def test_get_obo(self): \"\"\"Test getting an", "node in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] properties =", "= list(iterate_node_synonyms(data)) self.assertEqual(1, len(synonyms)) synonym = synonyms[0] self.assertEqual('N,N,N-tributylbutan-1-aminium fluoride', synonym.name,", "{prop for prop, value in properties}) self.assertEqual(1, sum(prop == t_prop", "NAME'), SynonymTypeDef(id='BRAND_NAME', name='BRAND NAME'), SynonymTypeDef(id='INN', name='INN'), ], key=attrgetter('id')), synonym_typedefs, )", "an OBO document.\"\"\" obo = get('chebi', url=TEST_CHEBI_OBO_PATH, local=True) terms =", "prop, value in properties)) value = [value for prop, value", "test_get_graph_typedefs(self): \"\"\"Test getting type definitions from an :mod:`obonet` graph.\"\"\" pairs", "test_get_graph_synonym_typedefs(self): 
\"\"\"Test getting synonym type definitions from an :mod:`obonet` graph.\"\"\"", "sorted([ SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), SynonymTypeDef(id='BRAND_NAME', name='BRAND NAME'), SynonymTypeDef(id='INN', name='INN'), ],", "= self.graph.nodes['CHEBI:51990'] properties = list(iterate_node_properties(data)) t_prop = 'http://purl.obolibrary.org/obo/chebi/monoisotopicmass' self.assertIn(t_prop, {prop", "data = self.graph.nodes['CHEBI:51990'] xrefs = list(iterate_node_xrefs(data)) self.assertEqual(7, len(xrefs)) # NOTE", "# TODO implement # self.assertEqual(SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), synonym.type) def test_get_node_properties(self):", "TestParseObonet(unittest.TestCase): \"\"\"\"\"\" @classmethod def setUpClass(cls) -> None: cls.graph = obonet.read_obo(TEST_CHEBI_OBO_PATH)", "= [value for prop, value in properties if prop ==", "xrefs} ) def test_get_node_relations(self): \"\"\"Test getting relations from a node", "self.assertIsInstance(typedef, Reference) self.assertEqual('chebi', typedef.prefix) self.assertEqual('is_conjugate_base_of', typedef.identifier) class TestGet(unittest.TestCase): \"\"\"Test generation", "getting synonym type definitions from an :mod:`obonet` graph.\"\"\" synonym_typedefs =", "from pyobo import SynonymTypeDef, get from pyobo.struct import Reference from", "data = self.graph.nodes['CHEBI:51990'] parents = list(iterate_node_parents(data)) self.assertEqual(2, len(parents)) self.assertEqual({'24060', '51992'},", "SynonymTypeDef(id='BRAND_NAME', name='BRAND NAME'), SynonymTypeDef(id='INN', name='INN'), ], key=attrgetter('id')), synonym_typedefs, ) def", "pyobo.struct.struct import ( iterate_graph_synonym_typedefs, iterate_graph_typedefs, iterate_node_parents, iterate_node_properties, iterate_node_relationships, iterate_node_synonyms, iterate_node_xrefs,", "== t_prop][0] self.assertEqual('261.28318', value) def test_get_node_parents(self): \"\"\"Test getting parents from", "self.assertEqual('chebi', 
typedef.prefix) self.assertEqual('is_conjugate_base_of', typedef.identifier) class TestGet(unittest.TestCase): \"\"\"Test generation of OBO", "import TEST_CHEBI_OBO_PATH class TestParseObonet(unittest.TestCase): \"\"\"\"\"\" @classmethod def setUpClass(cls) -> None:", "parents = list(iterate_node_parents(data)) self.assertEqual(2, len(parents)) self.assertEqual({'24060', '51992'}, { parent.identifier for", "len(relations)) typedef, target = relations[0] self.assertIsNotNone(target) self.assertIsInstance(target, Reference) self.assertEqual('chebi', target.prefix)", "the prefixes are remapped by PyOBO self.assertEqual({'pubmed', 'cas', 'beilstein', 'reaxys'},", "type definitions from an :mod:`obonet` graph.\"\"\" pairs = { (typedef.prefix,", "NAME'), SynonymTypeDef(id='INN', name='INN'), ], key=attrgetter('id')), synonym_typedefs, ) def test_get_node_synonyms(self): \"\"\"Test", "OBO document.\"\"\" obo = get('chebi', url=TEST_CHEBI_OBO_PATH, local=True) terms = list(obo)", "= synonyms[0] self.assertEqual('N,N,N-tributylbutan-1-aminium fluoride', synonym.name, msg='name parsing failed') self.assertEqual('EXACT', synonym.specificity,", "a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] synonyms = list(iterate_node_synonyms(data)) self.assertEqual(1,", "value in properties}) self.assertEqual(1, sum(prop == t_prop for prop, value", "relations[0] self.assertIsNotNone(target) self.assertIsInstance(target, Reference) self.assertEqual('chebi', target.prefix) self.assertEqual('29228', target.identifier) self.assertIsNotNone(typedef) self.assertIsInstance(typedef,", "msg='specificity parsing failed') # TODO implement # self.assertEqual(SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'),", "{ parent.prefix for parent in parents }) def test_get_node_xrefs(self): \"\"\"Test", "by PyOBO self.assertEqual({'pubmed', 'cas', 'beilstein', 'reaxys'}, { xref.prefix for xref", "synonym.specificity, msg='specificity parsing failed') # TODO implement # 
self.assertEqual(SynonymTypeDef(id='IUPAC_NAME', name='IUPAC", "== t_prop for prop, value in properties)) value = [value", "list(iterate_node_parents(data)) self.assertEqual(2, len(parents)) self.assertEqual({'24060', '51992'}, { parent.identifier for parent in", "\"\"\"\"\"\" @classmethod def setUpClass(cls) -> None: cls.graph = obonet.read_obo(TEST_CHEBI_OBO_PATH) def", "<filename>tests/test_get.py import unittest from operator import attrgetter import obonet from", "remapped by PyOBO self.assertEqual({'pubmed', 'cas', 'beilstein', 'reaxys'}, { xref.prefix for", "xref.identifier) for xref in xrefs} ) def test_get_node_relations(self): \"\"\"Test getting", "self.graph.nodes['CHEBI:51990'] xrefs = list(iterate_node_xrefs(data)) self.assertEqual(7, len(xrefs)) # NOTE the prefixes", "prefixes are remapped by PyOBO self.assertEqual({'pubmed', 'cas', 'beilstein', 'reaxys'}, {", "tests.constants import TEST_CHEBI_OBO_PATH class TestParseObonet(unittest.TestCase): \"\"\"\"\"\" @classmethod def setUpClass(cls) ->", "def test_get_node_parents(self): \"\"\"Test getting parents from a node in a", "import attrgetter import obonet from pyobo import SynonymTypeDef, get from", "{(xref.prefix, xref.identifier) for xref in xrefs} ) def test_get_node_relations(self): \"\"\"Test", "relations from a node in a :mod:`obonet` graph.\"\"\" data =", "len(xrefs)) # NOTE the prefixes are remapped by PyOBO self.assertEqual({'pubmed',", "'21517057'), ('pubmed', '22229781'), ('pubmed', '15074950'), }, {(xref.prefix, xref.identifier) for xref", "parsing failed') # TODO implement # self.assertEqual(SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), synonym.type)", "= self.graph.nodes['CHEBI:51990'] xrefs = list(iterate_node_xrefs(data)) self.assertEqual(7, len(xrefs)) # NOTE the", "('reaxys', '3570522'), ('beilstein', '3570522'), ('cas', '429-41-4'), ('pubmed', '21142041'), ('pubmed', '21517057'),", "Reference) self.assertEqual('chebi', target.prefix) self.assertEqual('29228', 
target.identifier) self.assertIsNotNone(typedef) self.assertIsInstance(typedef, Reference) self.assertEqual('chebi', typedef.prefix)", "TEST_CHEBI_OBO_PATH class TestParseObonet(unittest.TestCase): \"\"\"\"\"\" @classmethod def setUpClass(cls) -> None: cls.graph", "typedef in iterate_graph_typedefs(self.graph) } self.assertIn(('chebi', 'has_part'), pairs) def test_get_graph_synonym_typedefs(self): \"\"\"Test", "list(iterate_node_relationships(data, 'chebi')) self.assertEqual(1, len(relations)) typedef, target = relations[0] self.assertIsNotNone(target) self.assertIsInstance(target,", "a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:17051'] relations = list(iterate_node_relationships(data, 'chebi'))", "\"\"\"Test getting synonyms from a node in a :mod:`obonet` graph.\"\"\"", "test_get_obo(self): \"\"\"Test getting an OBO document.\"\"\" obo = get('chebi', url=TEST_CHEBI_OBO_PATH,", "class TestGet(unittest.TestCase): \"\"\"Test generation of OBO objects.\"\"\" def test_get_obo(self): \"\"\"Test", "value) def test_get_node_parents(self): \"\"\"Test getting parents from a node in", "('pubmed', '21142041'), ('pubmed', '21517057'), ('pubmed', '22229781'), ('pubmed', '15074950'), }, {(xref.prefix,", "for prop, value in properties if prop == t_prop][0] self.assertEqual('261.28318',", "}) self.assertEqual( { ('reaxys', '3570522'), ('beilstein', '3570522'), ('cas', '429-41-4'), ('pubmed',", "self.assertIsNotNone(typedef) self.assertIsInstance(typedef, Reference) self.assertEqual('chebi', typedef.prefix) self.assertEqual('is_conjugate_base_of', typedef.identifier) class TestGet(unittest.TestCase): \"\"\"Test", "implement # self.assertEqual(SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), synonym.type) def test_get_node_properties(self): \"\"\"Test getting", "'22229781'), ('pubmed', '15074950'), }, {(xref.prefix, xref.identifier) for xref in xrefs}", "document.\"\"\" obo = get('chebi', url=TEST_CHEBI_OBO_PATH, local=True) terms = list(obo) self.assertEqual(18,", 
":mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] properties = list(iterate_node_properties(data)) t_prop =", "getting type definitions from an :mod:`obonet` graph.\"\"\" pairs = {", "synonyms from a node in a :mod:`obonet` graph.\"\"\" data =", "('pubmed', '22229781'), ('pubmed', '15074950'), }, {(xref.prefix, xref.identifier) for xref in", "t_prop = 'http://purl.obolibrary.org/obo/chebi/monoisotopicmass' self.assertIn(t_prop, {prop for prop, value in properties})", "properties}) self.assertEqual(1, sum(prop == t_prop for prop, value in properties))", "name='BRAND NAME'), SynonymTypeDef(id='INN', name='INN'), ], key=attrgetter('id')), synonym_typedefs, ) def test_get_node_synonyms(self):", "= list(iterate_node_parents(data)) self.assertEqual(2, len(parents)) self.assertEqual({'24060', '51992'}, { parent.identifier for parent", "def test_get_node_synonyms(self): \"\"\"Test getting synonyms from a node in a", "parents }) def test_get_node_xrefs(self): \"\"\"Test getting parents from a node", "'chebi')) self.assertEqual(1, len(relations)) typedef, target = relations[0] self.assertIsNotNone(target) self.assertIsInstance(target, Reference)", "self.graph.nodes['CHEBI:51990'] properties = list(iterate_node_properties(data)) t_prop = 'http://purl.obolibrary.org/obo/chebi/monoisotopicmass' self.assertIn(t_prop, {prop for", "in properties if prop == t_prop][0] self.assertEqual('261.28318', value) def test_get_node_parents(self):", "from a node in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990']", "cls.graph = obonet.read_obo(TEST_CHEBI_OBO_PATH) def test_get_graph_typedefs(self): \"\"\"Test getting type definitions from", "properties from a node in a :mod:`obonet` graph.\"\"\" data =", "a node in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] parents", "in iterate_graph_typedefs(self.graph) } self.assertIn(('chebi', 'has_part'), pairs) def test_get_graph_synonym_typedefs(self): \"\"\"Test getting", "import 
SynonymTypeDef, get from pyobo.struct import Reference from pyobo.struct.struct import", "parent.prefix for parent in parents }) def test_get_node_xrefs(self): \"\"\"Test getting", "prop, value in properties if prop == t_prop][0] self.assertEqual('261.28318', value)", "def test_get_node_xrefs(self): \"\"\"Test getting parents from a node in a", "NAME'), synonym.type) def test_get_node_properties(self): \"\"\"Test getting properties from a node", "synonym_typedefs, ) def test_get_node_synonyms(self): \"\"\"Test getting synonyms from a node", "'http://purl.obolibrary.org/obo/chebi/monoisotopicmass' self.assertIn(t_prop, {prop for prop, value in properties}) self.assertEqual(1, sum(prop", "# NOTE the prefixes are remapped by PyOBO self.assertEqual({'pubmed', 'cas',", "@classmethod def setUpClass(cls) -> None: cls.graph = obonet.read_obo(TEST_CHEBI_OBO_PATH) def test_get_graph_typedefs(self):", "a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] parents = list(iterate_node_parents(data)) self.assertEqual(2,", "\"\"\"Test getting properties from a node in a :mod:`obonet` graph.\"\"\"", "self.assertIsInstance(target, Reference) self.assertEqual('chebi', target.prefix) self.assertEqual('29228', target.identifier) self.assertIsNotNone(typedef) self.assertIsInstance(typedef, Reference) self.assertEqual('chebi',", "self.assertEqual( { ('reaxys', '3570522'), ('beilstein', '3570522'), ('cas', '429-41-4'), ('pubmed', '21142041'),", "= self.graph.nodes['CHEBI:51990'] synonyms = list(iterate_node_synonyms(data)) self.assertEqual(1, len(synonyms)) synonym = synonyms[0]", "-> None: cls.graph = obonet.read_obo(TEST_CHEBI_OBO_PATH) def test_get_graph_typedefs(self): \"\"\"Test getting type", "{ (typedef.prefix, typedef.identifier) for typedef in iterate_graph_typedefs(self.graph) } self.assertIn(('chebi', 'has_part'),", "are remapped by PyOBO self.assertEqual({'pubmed', 'cas', 'beilstein', 'reaxys'}, { xref.prefix", "in a :mod:`obonet` graph.\"\"\" data = 
self.graph.nodes['CHEBI:51990'] xrefs = list(iterate_node_xrefs(data))", "self.assertEqual('is_conjugate_base_of', typedef.identifier) class TestGet(unittest.TestCase): \"\"\"Test generation of OBO objects.\"\"\" def", "= obonet.read_obo(TEST_CHEBI_OBO_PATH) def test_get_graph_typedefs(self): \"\"\"Test getting type definitions from an", "import ( iterate_graph_synonym_typedefs, iterate_graph_typedefs, iterate_node_parents, iterate_node_properties, iterate_node_relationships, iterate_node_synonyms, iterate_node_xrefs, )", "'3570522'), ('cas', '429-41-4'), ('pubmed', '21142041'), ('pubmed', '21517057'), ('pubmed', '22229781'), ('pubmed',", "node in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] synonyms =", "an :mod:`obonet` graph.\"\"\" synonym_typedefs = sorted(iterate_graph_synonym_typedefs(self.graph), key=attrgetter('id')) self.assertEqual( sorted([ SynonymTypeDef(id='IUPAC_NAME',", ":mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:17051'] relations = list(iterate_node_relationships(data, 'chebi')) self.assertEqual(1,", "TODO implement # self.assertEqual(SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), synonym.type) def test_get_node_properties(self): \"\"\"Test", "typedef.prefix) self.assertEqual('is_conjugate_base_of', typedef.identifier) class TestGet(unittest.TestCase): \"\"\"Test generation of OBO objects.\"\"\"", "def test_get_graph_synonym_typedefs(self): \"\"\"Test getting synonym type definitions from an :mod:`obonet`", "self.assertIsNotNone(target) self.assertIsInstance(target, Reference) self.assertEqual('chebi', target.prefix) self.assertEqual('29228', target.identifier) self.assertIsNotNone(typedef) self.assertIsInstance(typedef, Reference)", "prop, value in properties}) self.assertEqual(1, sum(prop == t_prop for prop,", "synonym type definitions from an :mod:`obonet` graph.\"\"\" synonym_typedefs = sorted(iterate_graph_synonym_typedefs(self.graph),", "synonyms = list(iterate_node_synonyms(data)) self.assertEqual(1, 
len(synonyms)) synonym = synonyms[0] self.assertEqual('N,N,N-tributylbutan-1-aminium fluoride',", "parent.identifier for parent in parents }) self.assertEqual({'chebi'}, { parent.prefix for", "\"\"\"Test getting parents from a node in a :mod:`obonet` graph.\"\"\"", "parent in parents }) self.assertEqual({'chebi'}, { parent.prefix for parent in", "for typedef in iterate_graph_typedefs(self.graph) } self.assertIn(('chebi', 'has_part'), pairs) def test_get_graph_synonym_typedefs(self):", "target = relations[0] self.assertIsNotNone(target) self.assertIsInstance(target, Reference) self.assertEqual('chebi', target.prefix) self.assertEqual('29228', target.identifier)", ":mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] xrefs = list(iterate_node_xrefs(data)) self.assertEqual(7, len(xrefs))", "}) self.assertEqual({'chebi'}, { parent.prefix for parent in parents }) def", ":mod:`obonet` graph.\"\"\" pairs = { (typedef.prefix, typedef.identifier) for typedef in", "pairs) def test_get_graph_synonym_typedefs(self): \"\"\"Test getting synonym type definitions from an", "self.assertEqual(2, len(parents)) self.assertEqual({'24060', '51992'}, { parent.identifier for parent in parents", "properties)) value = [value for prop, value in properties if", "msg='name parsing failed') self.assertEqual('EXACT', synonym.specificity, msg='specificity parsing failed') # TODO", "SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), SynonymTypeDef(id='BRAND_NAME', name='BRAND NAME'), SynonymTypeDef(id='INN', name='INN'), ], key=attrgetter('id')),", "# self.assertEqual(SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), synonym.type) def test_get_node_properties(self): \"\"\"Test getting properties", "self.assertEqual({'24060', '51992'}, { parent.identifier for parent in parents }) self.assertEqual({'chebi'},", "iterate_graph_typedefs, iterate_node_parents, iterate_node_properties, iterate_node_relationships, iterate_node_synonyms, iterate_node_xrefs, ) from tests.constants import", 
"self.graph.nodes['CHEBI:17051'] relations = list(iterate_node_relationships(data, 'chebi')) self.assertEqual(1, len(relations)) typedef, target =", "'429-41-4'), ('pubmed', '21142041'), ('pubmed', '21517057'), ('pubmed', '22229781'), ('pubmed', '15074950'), },", "self.graph.nodes['CHEBI:51990'] synonyms = list(iterate_node_synonyms(data)) self.assertEqual(1, len(synonyms)) synonym = synonyms[0] self.assertEqual('N,N,N-tributylbutan-1-aminium", "self.assertEqual( sorted([ SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), SynonymTypeDef(id='BRAND_NAME', name='BRAND NAME'), SynonymTypeDef(id='INN', name='INN'),", "self.assertEqual(1, sum(prop == t_prop for prop, value in properties)) value", "self.assertIn(t_prop, {prop for prop, value in properties}) self.assertEqual(1, sum(prop ==", "iterate_node_parents, iterate_node_properties, iterate_node_relationships, iterate_node_synonyms, iterate_node_xrefs, ) from tests.constants import TEST_CHEBI_OBO_PATH", "from pyobo.struct.struct import ( iterate_graph_synonym_typedefs, iterate_graph_typedefs, iterate_node_parents, iterate_node_properties, iterate_node_relationships, iterate_node_synonyms,", "self.assertEqual('29228', target.identifier) self.assertIsNotNone(typedef) self.assertIsInstance(typedef, Reference) self.assertEqual('chebi', typedef.prefix) self.assertEqual('is_conjugate_base_of', typedef.identifier) class", "('pubmed', '15074950'), }, {(xref.prefix, xref.identifier) for xref in xrefs} )", "def test_get_graph_typedefs(self): \"\"\"Test getting type definitions from an :mod:`obonet` graph.\"\"\"", "\"\"\"Test getting relations from a node in a :mod:`obonet` graph.\"\"\"", "fluoride', synonym.name, msg='name parsing failed') self.assertEqual('EXACT', synonym.specificity, msg='specificity parsing failed')", ") from tests.constants import TEST_CHEBI_OBO_PATH class TestParseObonet(unittest.TestCase): \"\"\"\"\"\" @classmethod def", "from an :mod:`obonet` graph.\"\"\" pairs = { (typedef.prefix, typedef.identifier) 
for", "for prop, value in properties)) value = [value for prop,", "operator import attrgetter import obonet from pyobo import SynonymTypeDef, get", "for prop, value in properties}) self.assertEqual(1, sum(prop == t_prop for", "getting an OBO document.\"\"\" obo = get('chebi', url=TEST_CHEBI_OBO_PATH, local=True) terms", "definitions from an :mod:`obonet` graph.\"\"\" pairs = { (typedef.prefix, typedef.identifier)", "\"\"\"Test getting an OBO document.\"\"\" obo = get('chebi', url=TEST_CHEBI_OBO_PATH, local=True)", "}, {(xref.prefix, xref.identifier) for xref in xrefs} ) def test_get_node_relations(self):", "= 'http://purl.obolibrary.org/obo/chebi/monoisotopicmass' self.assertIn(t_prop, {prop for prop, value in properties}) self.assertEqual(1,", "self.assertIn(('chebi', 'has_part'), pairs) def test_get_graph_synonym_typedefs(self): \"\"\"Test getting synonym type definitions", "import obonet from pyobo import SynonymTypeDef, get from pyobo.struct import", ":mod:`obonet` graph.\"\"\" synonym_typedefs = sorted(iterate_graph_synonym_typedefs(self.graph), key=attrgetter('id')) self.assertEqual( sorted([ SynonymTypeDef(id='IUPAC_NAME', name='IUPAC", "= list(iterate_node_properties(data)) t_prop = 'http://purl.obolibrary.org/obo/chebi/monoisotopicmass' self.assertIn(t_prop, {prop for prop, value", "prop == t_prop][0] self.assertEqual('261.28318', value) def test_get_node_parents(self): \"\"\"Test getting parents", "a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] properties = list(iterate_node_properties(data)) t_prop", "for xref in xrefs }) self.assertEqual( { ('reaxys', '3570522'), ('beilstein',", "objects.\"\"\" def test_get_obo(self): \"\"\"Test getting an OBO document.\"\"\" obo =", "attrgetter import obonet from pyobo import SynonymTypeDef, get from pyobo.struct", "properties = list(iterate_node_properties(data)) t_prop = 'http://purl.obolibrary.org/obo/chebi/monoisotopicmass' self.assertIn(t_prop, {prop for prop,", "synonym = synonyms[0] 
self.assertEqual('N,N,N-tributylbutan-1-aminium fluoride', synonym.name, msg='name parsing failed') self.assertEqual('EXACT',", "typedef, target = relations[0] self.assertIsNotNone(target) self.assertIsInstance(target, Reference) self.assertEqual('chebi', target.prefix) self.assertEqual('29228',", "NOTE the prefixes are remapped by PyOBO self.assertEqual({'pubmed', 'cas', 'beilstein',", "obonet from pyobo import SynonymTypeDef, get from pyobo.struct import Reference", "parent in parents }) def test_get_node_xrefs(self): \"\"\"Test getting parents from", "( iterate_graph_synonym_typedefs, iterate_graph_typedefs, iterate_node_parents, iterate_node_properties, iterate_node_relationships, iterate_node_synonyms, iterate_node_xrefs, ) from", "graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] synonyms = list(iterate_node_synonyms(data)) self.assertEqual(1, len(synonyms)) synonym", "node in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] xrefs =", "typedef.identifier) class TestGet(unittest.TestCase): \"\"\"Test generation of OBO objects.\"\"\" def test_get_obo(self):", "= sorted(iterate_graph_synonym_typedefs(self.graph), key=attrgetter('id')) self.assertEqual( sorted([ SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), SynonymTypeDef(id='BRAND_NAME', name='BRAND", "relations = list(iterate_node_relationships(data, 'chebi')) self.assertEqual(1, len(relations)) typedef, target = relations[0]", "obo = get('chebi', url=TEST_CHEBI_OBO_PATH, local=True) terms = list(obo) self.assertEqual(18, len(terms))", "test_get_node_relations(self): \"\"\"Test getting relations from a node in a :mod:`obonet`", "value in properties)) value = [value for prop, value in", "], key=attrgetter('id')), synonym_typedefs, ) def test_get_node_synonyms(self): \"\"\"Test getting synonyms from", "data = self.graph.nodes['CHEBI:17051'] relations = list(iterate_node_relationships(data, 'chebi')) self.assertEqual(1, len(relations)) typedef,", "for parent in parents }) 
self.assertEqual({'chebi'}, { parent.prefix for parent", "sorted(iterate_graph_synonym_typedefs(self.graph), key=attrgetter('id')) self.assertEqual( sorted([ SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), SynonymTypeDef(id='BRAND_NAME', name='BRAND NAME'),", "self.assertEqual(1, len(synonyms)) synonym = synonyms[0] self.assertEqual('N,N,N-tributylbutan-1-aminium fluoride', synonym.name, msg='name parsing", ") def test_get_node_relations(self): \"\"\"Test getting relations from a node in", "pyobo import SynonymTypeDef, get from pyobo.struct import Reference from pyobo.struct.struct", "'reaxys'}, { xref.prefix for xref in xrefs }) self.assertEqual( {", "def test_get_node_properties(self): \"\"\"Test getting properties from a node in a", "}) def test_get_node_xrefs(self): \"\"\"Test getting parents from a node in", "list(iterate_node_properties(data)) t_prop = 'http://purl.obolibrary.org/obo/chebi/monoisotopicmass' self.assertIn(t_prop, {prop for prop, value in", "iterate_node_properties, iterate_node_relationships, iterate_node_synonyms, iterate_node_xrefs, ) from tests.constants import TEST_CHEBI_OBO_PATH class", "import unittest from operator import attrgetter import obonet from pyobo", "in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] parents = list(iterate_node_parents(data))", "value in properties if prop == t_prop][0] self.assertEqual('261.28318', value) def", "target.prefix) self.assertEqual('29228', target.identifier) self.assertIsNotNone(typedef) self.assertIsInstance(typedef, Reference) self.assertEqual('chebi', typedef.prefix) self.assertEqual('is_conjugate_base_of', typedef.identifier)", "name='INN'), ], key=attrgetter('id')), synonym_typedefs, ) def test_get_node_synonyms(self): \"\"\"Test getting synonyms", "} self.assertIn(('chebi', 'has_part'), pairs) def test_get_graph_synonym_typedefs(self): \"\"\"Test getting synonym type", "self.assertEqual('N,N,N-tributylbutan-1-aminium fluoride', synonym.name, msg='name parsing 
failed') self.assertEqual('EXACT', synonym.specificity, msg='specificity parsing", "test_get_node_xrefs(self): \"\"\"Test getting parents from a node in a :mod:`obonet`", "synonym_typedefs = sorted(iterate_graph_synonym_typedefs(self.graph), key=attrgetter('id')) self.assertEqual( sorted([ SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), SynonymTypeDef(id='BRAND_NAME',", "self.assertEqual({'chebi'}, { parent.prefix for parent in parents }) def test_get_node_xrefs(self):", "from operator import attrgetter import obonet from pyobo import SynonymTypeDef,", "from an :mod:`obonet` graph.\"\"\" synonym_typedefs = sorted(iterate_graph_synonym_typedefs(self.graph), key=attrgetter('id')) self.assertEqual( sorted([", "iterate_graph_typedefs(self.graph) } self.assertIn(('chebi', 'has_part'), pairs) def test_get_graph_synonym_typedefs(self): \"\"\"Test getting synonym", "TestGet(unittest.TestCase): \"\"\"Test generation of OBO objects.\"\"\" def test_get_obo(self): \"\"\"Test getting", "pyobo.struct import Reference from pyobo.struct.struct import ( iterate_graph_synonym_typedefs, iterate_graph_typedefs, iterate_node_parents,", "parents from a node in a :mod:`obonet` graph.\"\"\" data =", "in properties}) self.assertEqual(1, sum(prop == t_prop for prop, value in", "data = self.graph.nodes['CHEBI:51990'] synonyms = list(iterate_node_synonyms(data)) self.assertEqual(1, len(synonyms)) synonym =", "parsing failed') self.assertEqual('EXACT', synonym.specificity, msg='specificity parsing failed') # TODO implement", "node in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:17051'] relations =", "self.assertEqual(1, len(relations)) typedef, target = relations[0] self.assertIsNotNone(target) self.assertIsInstance(target, Reference) self.assertEqual('chebi',", "self.assertEqual(SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'), synonym.type) def test_get_node_properties(self): \"\"\"Test getting properties from", "obonet.read_obo(TEST_CHEBI_OBO_PATH) def 
test_get_graph_typedefs(self): \"\"\"Test getting type definitions from an :mod:`obonet`", "OBO objects.\"\"\" def test_get_obo(self): \"\"\"Test getting an OBO document.\"\"\" obo", "graph.\"\"\" synonym_typedefs = sorted(iterate_graph_synonym_typedefs(self.graph), key=attrgetter('id')) self.assertEqual( sorted([ SynonymTypeDef(id='IUPAC_NAME', name='IUPAC NAME'),", "in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:17051'] relations = list(iterate_node_relationships(data,", "getting relations from a node in a :mod:`obonet` graph.\"\"\" data", ":mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] synonyms = list(iterate_node_synonyms(data)) self.assertEqual(1, len(synonyms))", "a node in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] synonyms", "parents }) self.assertEqual({'chebi'}, { parent.prefix for parent in parents })", "unittest from operator import attrgetter import obonet from pyobo import", "('beilstein', '3570522'), ('cas', '429-41-4'), ('pubmed', '21142041'), ('pubmed', '21517057'), ('pubmed', '22229781'),", "self.assertEqual({'pubmed', 'cas', 'beilstein', 'reaxys'}, { xref.prefix for xref in xrefs", "def setUpClass(cls) -> None: cls.graph = obonet.read_obo(TEST_CHEBI_OBO_PATH) def test_get_graph_typedefs(self): \"\"\"Test", "in parents }) def test_get_node_xrefs(self): \"\"\"Test getting parents from a", "a node in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] xrefs", "synonym.name, msg='name parsing failed') self.assertEqual('EXACT', synonym.specificity, msg='specificity parsing failed') #", ") def test_get_node_synonyms(self): \"\"\"Test getting synonyms from a node in", "self.assertEqual(7, len(xrefs)) # NOTE the prefixes are remapped by PyOBO", "for parent in parents }) def test_get_node_xrefs(self): \"\"\"Test getting parents", "node in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] parents =", "graph.\"\"\" pairs = { (typedef.prefix, typedef.identifier) for 
typedef in iterate_graph_typedefs(self.graph)", "graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] xrefs = list(iterate_node_xrefs(data)) self.assertEqual(7, len(xrefs)) #", "name='IUPAC NAME'), synonym.type) def test_get_node_properties(self): \"\"\"Test getting properties from a", "import Reference from pyobo.struct.struct import ( iterate_graph_synonym_typedefs, iterate_graph_typedefs, iterate_node_parents, iterate_node_properties,", "if prop == t_prop][0] self.assertEqual('261.28318', value) def test_get_node_parents(self): \"\"\"Test getting", "synonym.type) def test_get_node_properties(self): \"\"\"Test getting properties from a node in", "[value for prop, value in properties if prop == t_prop][0]", "xrefs }) self.assertEqual( { ('reaxys', '3570522'), ('beilstein', '3570522'), ('cas', '429-41-4'),", "{ xref.prefix for xref in xrefs }) self.assertEqual( { ('reaxys',", "test_get_node_synonyms(self): \"\"\"Test getting synonyms from a node in a :mod:`obonet`", "getting synonyms from a node in a :mod:`obonet` graph.\"\"\" data", "for xref in xrefs} ) def test_get_node_relations(self): \"\"\"Test getting relations", "synonyms[0] self.assertEqual('N,N,N-tributylbutan-1-aminium fluoride', synonym.name, msg='name parsing failed') self.assertEqual('EXACT', synonym.specificity, msg='specificity", "getting parents from a node in a :mod:`obonet` graph.\"\"\" data", "list(iterate_node_synonyms(data)) self.assertEqual(1, len(synonyms)) synonym = synonyms[0] self.assertEqual('N,N,N-tributylbutan-1-aminium fluoride', synonym.name, msg='name", "in xrefs }) self.assertEqual( { ('reaxys', '3570522'), ('beilstein', '3570522'), ('cas',", "= self.graph.nodes['CHEBI:51990'] parents = list(iterate_node_parents(data)) self.assertEqual(2, len(parents)) self.assertEqual({'24060', '51992'}, {", "graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] parents = list(iterate_node_parents(data)) self.assertEqual(2, len(parents)) self.assertEqual({'24060',", "\"\"\"Test getting synonym type 
definitions from an :mod:`obonet` graph.\"\"\" synonym_typedefs", "from a node in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:17051']", "target.identifier) self.assertIsNotNone(typedef) self.assertIsInstance(typedef, Reference) self.assertEqual('chebi', typedef.prefix) self.assertEqual('is_conjugate_base_of', typedef.identifier) class TestGet(unittest.TestCase):", "t_prop for prop, value in properties)) value = [value for", "\"\"\"Test getting type definitions from an :mod:`obonet` graph.\"\"\" pairs =", "in properties)) value = [value for prop, value in properties", "None: cls.graph = obonet.read_obo(TEST_CHEBI_OBO_PATH) def test_get_graph_typedefs(self): \"\"\"Test getting type definitions", "t_prop][0] self.assertEqual('261.28318', value) def test_get_node_parents(self): \"\"\"Test getting parents from a", "getting properties from a node in a :mod:`obonet` graph.\"\"\" data", "def test_get_obo(self): \"\"\"Test getting an OBO document.\"\"\" obo = get('chebi',", "self.assertEqual('261.28318', value) def test_get_node_parents(self): \"\"\"Test getting parents from a node", "{ ('reaxys', '3570522'), ('beilstein', '3570522'), ('cas', '429-41-4'), ('pubmed', '21142041'), ('pubmed',", "failed') self.assertEqual('EXACT', synonym.specificity, msg='specificity parsing failed') # TODO implement #", "SynonymTypeDef(id='INN', name='INN'), ], key=attrgetter('id')), synonym_typedefs, ) def test_get_node_synonyms(self): \"\"\"Test getting", "('cas', '429-41-4'), ('pubmed', '21142041'), ('pubmed', '21517057'), ('pubmed', '22229781'), ('pubmed', '15074950'),", "in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] synonyms = list(iterate_node_synonyms(data))", "in parents }) self.assertEqual({'chebi'}, { parent.prefix for parent in parents", "self.assertEqual('chebi', target.prefix) self.assertEqual('29228', target.identifier) self.assertIsNotNone(typedef) self.assertIsInstance(typedef, Reference) self.assertEqual('chebi', typedef.prefix) 
self.assertEqual('is_conjugate_base_of',", "len(parents)) self.assertEqual({'24060', '51992'}, { parent.identifier for parent in parents })", "data = self.graph.nodes['CHEBI:51990'] properties = list(iterate_node_properties(data)) t_prop = 'http://purl.obolibrary.org/obo/chebi/monoisotopicmass' self.assertIn(t_prop,", "= { (typedef.prefix, typedef.identifier) for typedef in iterate_graph_typedefs(self.graph) } self.assertIn(('chebi',", "a node in a :mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] properties", "('pubmed', '21517057'), ('pubmed', '22229781'), ('pubmed', '15074950'), }, {(xref.prefix, xref.identifier) for", "name='IUPAC NAME'), SynonymTypeDef(id='BRAND_NAME', name='BRAND NAME'), SynonymTypeDef(id='INN', name='INN'), ], key=attrgetter('id')), synonym_typedefs,", "get from pyobo.struct import Reference from pyobo.struct.struct import ( iterate_graph_synonym_typedefs,", "'51992'}, { parent.identifier for parent in parents }) self.assertEqual({'chebi'}, {", "key=attrgetter('id')), synonym_typedefs, ) def test_get_node_synonyms(self): \"\"\"Test getting synonyms from a", "xrefs = list(iterate_node_xrefs(data)) self.assertEqual(7, len(xrefs)) # NOTE the prefixes are", "of OBO objects.\"\"\" def test_get_obo(self): \"\"\"Test getting an OBO document.\"\"\"", "type definitions from an :mod:`obonet` graph.\"\"\" synonym_typedefs = sorted(iterate_graph_synonym_typedefs(self.graph), key=attrgetter('id'))", ":mod:`obonet` graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] parents = list(iterate_node_parents(data)) self.assertEqual(2, len(parents))", "graph.\"\"\" data = self.graph.nodes['CHEBI:51990'] properties = list(iterate_node_properties(data)) t_prop = 'http://purl.obolibrary.org/obo/chebi/monoisotopicmass'", "pairs = { (typedef.prefix, typedef.identifier) for typedef in iterate_graph_typedefs(self.graph) }", "list(iterate_node_xrefs(data)) self.assertEqual(7, len(xrefs)) # NOTE the prefixes are remapped by", "'has_part'), pairs) def 
test_get_graph_synonym_typedefs(self): \"\"\"Test getting synonym type definitions from", "self.graph.nodes['CHEBI:51990'] parents = list(iterate_node_parents(data)) self.assertEqual(2, len(parents)) self.assertEqual({'24060', '51992'}, { parent.identifier", "xref in xrefs} ) def test_get_node_relations(self): \"\"\"Test getting relations from", "properties if prop == t_prop][0] self.assertEqual('261.28318', value) def test_get_node_parents(self): \"\"\"Test", "'3570522'), ('beilstein', '3570522'), ('cas', '429-41-4'), ('pubmed', '21142041'), ('pubmed', '21517057'), ('pubmed'," ]
[ "interest_points_df.index: results.append([ location_df_complete.loc[i, \"fid\"], distance(location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"], float(interest_points_df.loc[j, \"lat\"]),", "\"\"\" El proceso es muy pesado y no es posible", "interest_points_df.loc[j, \"name\"] ]) final = list(zip(*results)) return pd.DataFrame({'fid': final[0], 'distance':", "coding: utf-8 -*- # + ## Utilidades comunes entre places", "pd def distance(lat1, lon1, lat2, lon2): \"\"\" El resultado de", "interest_points_df.loc[j, \"lat\"], interest_points_df.loc[j, \"lon\"], interest_points_df.loc[j, \"amenity\"], interest_points_df.loc[j, \"name\"] ]) final", "[777], 'latitude': [lat], 'longitude': [lon]}) \"\"\" El proceso es muy", "y OSM. # + import csv import ast import codecs", "cos((lat2 - lat1) * p)/2 + cos(lat1 * p) *", "i in location_df_complete.index: for j in interest_points_df.index: results.append([ location_df_complete.loc[i, \"fid\"],", "p) * (1 - cos((lon2 - lon1) * p)) /", "final[2], 'p_lon': final[3], 'i_lat': final[4], 'i_lon': final[5], 'amenity': final[6], 'name':", "0.5 - cos((lat2 - lat1) * p)/2 + cos(lat1 *", "proceso es muy pesado y no es posible hacer el", "cos(lat2 * p) * (1 - cos((lon2 - lon1) *", "no es posible hacer el ananlisis con toda la data", "in interest_points_df.index: results.append([ location_df_complete.loc[i, \"fid\"], distance(location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"], float(interest_points_df.loc[j,", "delimiter=\"|\", encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding) as fp: reader = csv.reader(fp,", "join. 
\"\"\" def compute_cross_distances(location_df, interest_points_df=None): condition_latitude = ~location_df[\"latitude\"].isna() condition_longitude =", "* p)/2 + cos(lat1 * p) * cos(lat2 * p)", "= df[\"longitude\"].mean() return pd.DataFrame({'fid': [777], 'latitude': [lat], 'longitude': [lon]}) \"\"\"", "-*- # + ## Utilidades comunes entre places y OSM.", "para caber en memoria. El uso correcto es filtrar los", "math import cos, asin, sqrt # + def read_csv_with_encoding(filename, delimiter=\"|\",", "pandas as pd def distance(lat1, lon1, lat2, lon2): \"\"\" El", "registros es demasiado grande para caber en memoria. El uso", "l = a.read() json_file = ast.literal_eval(l) return json_file # -", "csv.reader(fp, delimiter=delimiter) csvFile = list(reader) return pd.DataFrame(csvFile[1:], columns=csvFile[0]) def read_json_with_encoding(filename,", "* asin(sqrt(a)) def build_center_point(df): lat = df[\"latitude\"].mean() lon = df[\"longitude\"].mean()", "= csv.reader(fp, delimiter=delimiter) csvFile = list(reader) return pd.DataFrame(csvFile[1:], columns=csvFile[0]) def", "asin(sqrt(a)) def build_center_point(df): lat = df[\"latitude\"].mean() lon = df[\"longitude\"].mean() return", "antes de hacer el cross join. \"\"\" def compute_cross_distances(location_df, interest_points_df=None):", "results = [] for i in location_df_complete.index: for j in", "location_df_complete.loc[i, \"longitude\"], float(interest_points_df.loc[j, \"lat\"]), float(interest_points_df.loc[j, \"lon\"])), location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"],", "ast import codecs from math import cos, asin, sqrt #", "p)/2 + cos(lat1 * p) * cos(lat2 * p) *", "a.read() json_file = ast.literal_eval(l) return json_file # - import pandas", "* (1 - cos((lon2 - lon1) * p)) / 2", "ananlisis con toda la data de bogotá, el número de", "datos antes de hacer el cross join. 
\"\"\" def compute_cross_distances(location_df,", "return pd.DataFrame({'fid': [777], 'latitude': [lat], 'longitude': [lon]}) \"\"\" El proceso", "def read_json_with_encoding(filename, encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding) as a: l =", "lat = df[\"latitude\"].mean() lon = df[\"longitude\"].mean() return pd.DataFrame({'fid': [777], 'latitude':", "caber en memoria. El uso correcto es filtrar los datos", "ast.literal_eval(l) return json_file # - import pandas as pd def", "resultado de la medición de distancia esta en kilometros. \"\"\"", "\"\"\" El resultado de la medición de distancia esta en", "medición de distancia esta en kilometros. \"\"\" p = 0.017453292519943295", "- cos((lat2 - lat1) * p)/2 + cos(lat1 * p)", "csv import ast import codecs from math import cos, asin,", "list(reader) return pd.DataFrame(csvFile[1:], columns=csvFile[0]) def read_json_with_encoding(filename, encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding)", "* p) * (1 - cos((lon2 - lon1) * p))", "condition_longitude = ~location_df[\"longitude\"].isna() location_df_complete = location_df.loc[condition_latitude & condition_longitude] results =", "- cos((lon2 - lon1) * p)) / 2 return 12742", "distancia esta en kilometros. 
\"\"\" p = 0.017453292519943295 #Pi/180 a", "2 return 12742 * asin(sqrt(a)) def build_center_point(df): lat = df[\"latitude\"].mean()", "results.append([ location_df_complete.loc[i, \"fid\"], distance(location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"], float(interest_points_df.loc[j, \"lat\"]), float(interest_points_df.loc[j,", "read_json_with_encoding(filename, encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding) as a: l = a.read()", "encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding) as a: l = a.read() json_file", "as a: l = a.read() json_file = ast.literal_eval(l) return json_file", "\"\"\" def compute_cross_distances(location_df, interest_points_df=None): condition_latitude = ~location_df[\"latitude\"].isna() condition_longitude = ~location_df[\"longitude\"].isna()", "OSM. # + import csv import ast import codecs from", "location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"], interest_points_df.loc[j, \"lat\"], interest_points_df.loc[j, \"lon\"], interest_points_df.loc[j, \"amenity\"],", "a: l = a.read() json_file = ast.literal_eval(l) return json_file #", "hacer el cross join. \"\"\" def compute_cross_distances(location_df, interest_points_df=None): condition_latitude =", "location_df_complete.index: for j in interest_points_df.index: results.append([ location_df_complete.loc[i, \"fid\"], distance(location_df_complete.loc[i, \"latitude\"],", "as fp: reader = csv.reader(fp, delimiter=delimiter) csvFile = list(reader) return", "0.017453292519943295 #Pi/180 a = 0.5 - cos((lat2 - lat1) *", "es filtrar los datos antes de hacer el cross join.", "= 0.017453292519943295 #Pi/180 a = 0.5 - cos((lat2 - lat1)", "el cross join. 
\"\"\" def compute_cross_distances(location_df, interest_points_df=None): condition_latitude = ~location_df[\"latitude\"].isna()", "list(zip(*results)) return pd.DataFrame({'fid': final[0], 'distance': final[1], 'p_lat': final[2], 'p_lon': final[3],", "return pd.DataFrame({'fid': final[0], 'distance': final[1], 'p_lat': final[2], 'p_lon': final[3], 'i_lat':", "pd.DataFrame({'fid': [777], 'latitude': [lat], 'longitude': [lon]}) \"\"\" El proceso es", "el ananlisis con toda la data de bogotá, el número", "\"fid\"], distance(location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"], float(interest_points_df.loc[j, \"lat\"]), float(interest_points_df.loc[j, \"lon\"])), location_df_complete.loc[i,", "\"lon\"])), location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"], interest_points_df.loc[j, \"lat\"], interest_points_df.loc[j, \"lon\"], interest_points_df.loc[j,", "interest_points_df.loc[j, \"amenity\"], interest_points_df.loc[j, \"name\"] ]) final = list(zip(*results)) return pd.DataFrame({'fid':", "uso correcto es filtrar los datos antes de hacer el", "# + import csv import ast import codecs from math", "lon1, lat2, lon2): \"\"\" El resultado de la medición de", "pesado y no es posible hacer el ananlisis con toda", "- lat1) * p)/2 + cos(lat1 * p) * cos(lat2", "El uso correcto es filtrar los datos antes de hacer", "read_csv_with_encoding(filename, delimiter=\"|\", encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding) as fp: reader =", "+ def read_csv_with_encoding(filename, delimiter=\"|\", encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding) as fp:", "columns=csvFile[0]) def read_json_with_encoding(filename, encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding) as a: l", "12742 * asin(sqrt(a)) def build_center_point(df): lat = df[\"latitude\"].mean() lon =", "cos(lat1 * p) * cos(lat2 * p) * (1 -", "\"lon\"], interest_points_df.loc[j, \"amenity\"], 
interest_points_df.loc[j, \"name\"] ]) final = list(zip(*results)) return", "filtrar los datos antes de hacer el cross join. \"\"\"", "* p) * cos(lat2 * p) * (1 - cos((lon2", "distance(lat1, lon1, lat2, lon2): \"\"\" El resultado de la medición", "cross join. \"\"\" def compute_cross_distances(location_df, interest_points_df=None): condition_latitude = ~location_df[\"latitude\"].isna() condition_longitude", "\"\"\" p = 0.017453292519943295 #Pi/180 a = 0.5 - cos((lat2", "with codecs.open(filename, encoding=encoding) as a: l = a.read() json_file =", "condition_longitude] results = [] for i in location_df_complete.index: for j", "#Pi/180 a = 0.5 - cos((lat2 - lat1) * p)/2", "= ~location_df[\"longitude\"].isna() location_df_complete = location_df.loc[condition_latitude & condition_longitude] results = []", "encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding) as fp: reader = csv.reader(fp, delimiter=delimiter)", "es muy pesado y no es posible hacer el ananlisis", "a = 0.5 - cos((lat2 - lat1) * p)/2 +", "muy pesado y no es posible hacer el ananlisis con", "= location_df.loc[condition_latitude & condition_longitude] results = [] for i in", "El proceso es muy pesado y no es posible hacer", "def distance(lat1, lon1, lat2, lon2): \"\"\" El resultado de la", "return 12742 * asin(sqrt(a)) def build_center_point(df): lat = df[\"latitude\"].mean() lon", "as pd def distance(lat1, lon1, lat2, lon2): \"\"\" El resultado", "utf-8 -*- # + ## Utilidades comunes entre places y", "json_file = ast.literal_eval(l) return json_file # - import pandas as", "lon1) * p)) / 2 return 12742 * asin(sqrt(a)) def", "delimiter=delimiter) csvFile = list(reader) return pd.DataFrame(csvFile[1:], columns=csvFile[0]) def read_json_with_encoding(filename, encoding=\"iso-8859-1\"):", "lat1) * p)/2 + cos(lat1 * p) * cos(lat2 *", "reader = csv.reader(fp, delimiter=delimiter) csvFile = list(reader) return pd.DataFrame(csvFile[1:], columns=csvFile[0])", "import codecs from math import cos, 
asin, sqrt # +", "con toda la data de bogotá, el número de registros", "sqrt # + def read_csv_with_encoding(filename, delimiter=\"|\", encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding)", "= [] for i in location_df_complete.index: for j in interest_points_df.index:", "\"longitude\"], float(interest_points_df.loc[j, \"lat\"]), float(interest_points_df.loc[j, \"lon\"])), location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"], interest_points_df.loc[j,", "\"longitude\"], interest_points_df.loc[j, \"lat\"], interest_points_df.loc[j, \"lon\"], interest_points_df.loc[j, \"amenity\"], interest_points_df.loc[j, \"name\"] ])", "hacer el ananlisis con toda la data de bogotá, el", "= 0.5 - cos((lat2 - lat1) * p)/2 + cos(lat1", "de hacer el cross join. \"\"\" def compute_cross_distances(location_df, interest_points_df=None): condition_latitude", "import ast import codecs from math import cos, asin, sqrt", "fp: reader = csv.reader(fp, delimiter=delimiter) csvFile = list(reader) return pd.DataFrame(csvFile[1:],", "codecs.open(filename, encoding=encoding) as fp: reader = csv.reader(fp, delimiter=delimiter) csvFile =", "\"name\"] ]) final = list(zip(*results)) return pd.DataFrame({'fid': final[0], 'distance': final[1],", "es demasiado grande para caber en memoria. El uso correcto", "location_df.loc[condition_latitude & condition_longitude] results = [] for i in location_df_complete.index:", "import cos, asin, sqrt # + def read_csv_with_encoding(filename, delimiter=\"|\", encoding=\"iso-8859-1\"):", "en kilometros. \"\"\" p = 0.017453292519943295 #Pi/180 a = 0.5", "codecs from math import cos, asin, sqrt # + def", "encoding=encoding) as a: l = a.read() json_file = ast.literal_eval(l) return", "la medición de distancia esta en kilometros. 
\"\"\" p =", "lon2): \"\"\" El resultado de la medición de distancia esta", "location_df_complete.loc[i, \"fid\"], distance(location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"], float(interest_points_df.loc[j, \"lat\"]), float(interest_points_df.loc[j, \"lon\"])),", "+ ## Utilidades comunes entre places y OSM. # +", "de distancia esta en kilometros. \"\"\" p = 0.017453292519943295 #Pi/180", "memoria. El uso correcto es filtrar los datos antes de", "bogotá, el número de registros es demasiado grande para caber", "'p_lon': final[3], 'i_lat': final[4], 'i_lon': final[5], 'amenity': final[6], 'name': final[7]})", "condition_latitude = ~location_df[\"latitude\"].isna() condition_longitude = ~location_df[\"longitude\"].isna() location_df_complete = location_df.loc[condition_latitude &", "~location_df[\"longitude\"].isna() location_df_complete = location_df.loc[condition_latitude & condition_longitude] results = [] for", "# -*- coding: utf-8 -*- # + ## Utilidades comunes", "compute_cross_distances(location_df, interest_points_df=None): condition_latitude = ~location_df[\"latitude\"].isna() condition_longitude = ~location_df[\"longitude\"].isna() location_df_complete =", "distance(location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"], float(interest_points_df.loc[j, \"lat\"]), float(interest_points_df.loc[j, \"lon\"])), location_df_complete.loc[i, \"latitude\"],", "df[\"latitude\"].mean() lon = df[\"longitude\"].mean() return pd.DataFrame({'fid': [777], 'latitude': [lat], 'longitude':", "es posible hacer el ananlisis con toda la data de", "p)) / 2 return 12742 * asin(sqrt(a)) def build_center_point(df): lat", "p = 0.017453292519943295 #Pi/180 a = 0.5 - cos((lat2 -", "csvFile = list(reader) return pd.DataFrame(csvFile[1:], columns=csvFile[0]) def read_json_with_encoding(filename, encoding=\"iso-8859-1\"): with", "y no es posible hacer el ananlisis con toda la", "& condition_longitude] results = [] for i in 
location_df_complete.index: for", "los datos antes de hacer el cross join. \"\"\" def", "posible hacer el ananlisis con toda la data de bogotá,", "de la medición de distancia esta en kilometros. \"\"\" p", "= list(reader) return pd.DataFrame(csvFile[1:], columns=csvFile[0]) def read_json_with_encoding(filename, encoding=\"iso-8859-1\"): with codecs.open(filename,", "grande para caber en memoria. El uso correcto es filtrar", "import pandas as pd def distance(lat1, lon1, lat2, lon2): \"\"\"", "pd.DataFrame(csvFile[1:], columns=csvFile[0]) def read_json_with_encoding(filename, encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding) as a:", "entre places y OSM. # + import csv import ast", "* p)) / 2 return 12742 * asin(sqrt(a)) def build_center_point(df):", "location_df_complete = location_df.loc[condition_latitude & condition_longitude] results = [] for i", "* cos(lat2 * p) * (1 - cos((lon2 - lon1)", "número de registros es demasiado grande para caber en memoria.", "codecs.open(filename, encoding=encoding) as a: l = a.read() json_file = ast.literal_eval(l)", "en memoria. 
El uso correcto es filtrar los datos antes", "- import pandas as pd def distance(lat1, lon1, lat2, lon2):", "correcto es filtrar los datos antes de hacer el cross", "return pd.DataFrame(csvFile[1:], columns=csvFile[0]) def read_json_with_encoding(filename, encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding) as", "-*- coding: utf-8 -*- # + ## Utilidades comunes entre", "la data de bogotá, el número de registros es demasiado", "in location_df_complete.index: for j in interest_points_df.index: results.append([ location_df_complete.loc[i, \"fid\"], distance(location_df_complete.loc[i,", "j in interest_points_df.index: results.append([ location_df_complete.loc[i, \"fid\"], distance(location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"],", "/ 2 return 12742 * asin(sqrt(a)) def build_center_point(df): lat =", "lon = df[\"longitude\"].mean() return pd.DataFrame({'fid': [777], 'latitude': [lat], 'longitude': [lon]})", "'longitude': [lon]}) \"\"\" El proceso es muy pesado y no", "\"lat\"]), float(interest_points_df.loc[j, \"lon\"])), location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"], interest_points_df.loc[j, \"lat\"], interest_points_df.loc[j,", "from math import cos, asin, sqrt # + def read_csv_with_encoding(filename,", "cos, asin, sqrt # + def read_csv_with_encoding(filename, delimiter=\"|\", encoding=\"iso-8859-1\"): with", "encoding=encoding) as fp: reader = csv.reader(fp, delimiter=delimiter) csvFile = list(reader)", "[lon]}) \"\"\" El proceso es muy pesado y no es", "toda la data de bogotá, el número de registros es", "'p_lat': final[2], 'p_lon': final[3], 'i_lat': final[4], 'i_lon': final[5], 'amenity': final[6],", "build_center_point(df): lat = df[\"latitude\"].mean() lon = df[\"longitude\"].mean() return pd.DataFrame({'fid': [777],", "esta en kilometros. 
\"\"\" p = 0.017453292519943295 #Pi/180 a =", "def build_center_point(df): lat = df[\"latitude\"].mean() lon = df[\"longitude\"].mean() return pd.DataFrame({'fid':", "places y OSM. # + import csv import ast import", "+ cos(lat1 * p) * cos(lat2 * p) * (1", "float(interest_points_df.loc[j, \"lon\"])), location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"], interest_points_df.loc[j, \"lat\"], interest_points_df.loc[j, \"lon\"],", "\"amenity\"], interest_points_df.loc[j, \"name\"] ]) final = list(zip(*results)) return pd.DataFrame({'fid': final[0],", "# - import pandas as pd def distance(lat1, lon1, lat2,", "with codecs.open(filename, encoding=encoding) as fp: reader = csv.reader(fp, delimiter=delimiter) csvFile", "p) * cos(lat2 * p) * (1 - cos((lon2 -", "= ~location_df[\"latitude\"].isna() condition_longitude = ~location_df[\"longitude\"].isna() location_df_complete = location_df.loc[condition_latitude & condition_longitude]", "location_df_complete.loc[i, \"longitude\"], interest_points_df.loc[j, \"lat\"], interest_points_df.loc[j, \"lon\"], interest_points_df.loc[j, \"amenity\"], interest_points_df.loc[j, \"name\"]", "(1 - cos((lon2 - lon1) * p)) / 2 return", "El resultado de la medición de distancia esta en kilometros.", "]) final = list(zip(*results)) return pd.DataFrame({'fid': final[0], 'distance': final[1], 'p_lat':", "[lat], 'longitude': [lon]}) \"\"\" El proceso es muy pesado y", "'distance': final[1], 'p_lat': final[2], 'p_lon': final[3], 'i_lat': final[4], 'i_lon': final[5],", "<reponame>ymontilla/WebScrapingCatastro # -*- coding: utf-8 -*- # + ## Utilidades", "# + def read_csv_with_encoding(filename, delimiter=\"|\", encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding) as", "## Utilidades comunes entre places y OSM. # + import", "de registros es demasiado grande para caber en memoria. 
El", "'latitude': [lat], 'longitude': [lon]}) \"\"\" El proceso es muy pesado", "def compute_cross_distances(location_df, interest_points_df=None): condition_latitude = ~location_df[\"latitude\"].isna() condition_longitude = ~location_df[\"longitude\"].isna() location_df_complete", "final = list(zip(*results)) return pd.DataFrame({'fid': final[0], 'distance': final[1], 'p_lat': final[2],", "\"latitude\"], location_df_complete.loc[i, \"longitude\"], float(interest_points_df.loc[j, \"lat\"]), float(interest_points_df.loc[j, \"lon\"])), location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i,", "return json_file # - import pandas as pd def distance(lat1,", "= a.read() json_file = ast.literal_eval(l) return json_file # - import", "demasiado grande para caber en memoria. El uso correcto es", "final[0], 'distance': final[1], 'p_lat': final[2], 'p_lon': final[3], 'i_lat': final[4], 'i_lon':", "- lon1) * p)) / 2 return 12742 * asin(sqrt(a))", "def read_csv_with_encoding(filename, delimiter=\"|\", encoding=\"iso-8859-1\"): with codecs.open(filename, encoding=encoding) as fp: reader", "float(interest_points_df.loc[j, \"lat\"]), float(interest_points_df.loc[j, \"lon\"])), location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i, \"longitude\"], interest_points_df.loc[j, \"lat\"],", "= ast.literal_eval(l) return json_file # - import pandas as pd", "\"lat\"], interest_points_df.loc[j, \"lon\"], interest_points_df.loc[j, \"amenity\"], interest_points_df.loc[j, \"name\"] ]) final =", "import csv import ast import codecs from math import cos,", "kilometros. \"\"\" p = 0.017453292519943295 #Pi/180 a = 0.5 -", "# + ## Utilidades comunes entre places y OSM. 
#", "cos((lon2 - lon1) * p)) / 2 return 12742 *", "\"latitude\"], location_df_complete.loc[i, \"longitude\"], interest_points_df.loc[j, \"lat\"], interest_points_df.loc[j, \"lon\"], interest_points_df.loc[j, \"amenity\"], interest_points_df.loc[j,", "interest_points_df.loc[j, \"lon\"], interest_points_df.loc[j, \"amenity\"], interest_points_df.loc[j, \"name\"] ]) final = list(zip(*results))", "comunes entre places y OSM. # + import csv import", "interest_points_df=None): condition_latitude = ~location_df[\"latitude\"].isna() condition_longitude = ~location_df[\"longitude\"].isna() location_df_complete = location_df.loc[condition_latitude", "[] for i in location_df_complete.index: for j in interest_points_df.index: results.append([", "pd.DataFrame({'fid': final[0], 'distance': final[1], 'p_lat': final[2], 'p_lon': final[3], 'i_lat': final[4],", "data de bogotá, el número de registros es demasiado grande", "for i in location_df_complete.index: for j in interest_points_df.index: results.append([ location_df_complete.loc[i,", "json_file # - import pandas as pd def distance(lat1, lon1,", "df[\"longitude\"].mean() return pd.DataFrame({'fid': [777], 'latitude': [lat], 'longitude': [lon]}) \"\"\" El", "= list(zip(*results)) return pd.DataFrame({'fid': final[0], 'distance': final[1], 'p_lat': final[2], 'p_lon':", "final[1], 'p_lat': final[2], 'p_lon': final[3], 'i_lat': final[4], 'i_lon': final[5], 'amenity':", "lat2, lon2): \"\"\" El resultado de la medición de distancia", "asin, sqrt # + def read_csv_with_encoding(filename, delimiter=\"|\", encoding=\"iso-8859-1\"): with codecs.open(filename,", "Utilidades comunes entre places y OSM. 
# + import csv", "= df[\"latitude\"].mean() lon = df[\"longitude\"].mean() return pd.DataFrame({'fid': [777], 'latitude': [lat],", "el número de registros es demasiado grande para caber en", "de bogotá, el número de registros es demasiado grande para", "+ import csv import ast import codecs from math import", "~location_df[\"latitude\"].isna() condition_longitude = ~location_df[\"longitude\"].isna() location_df_complete = location_df.loc[condition_latitude & condition_longitude] results", "for j in interest_points_df.index: results.append([ location_df_complete.loc[i, \"fid\"], distance(location_df_complete.loc[i, \"latitude\"], location_df_complete.loc[i," ]
[ "query_list) elif matchedGames[b][5] == team0OddsInfo[a][0]: query_list = [team0OddsInfo[a][1], team1OddsInfo[a][1], matchedGames[b][0]]", "then imports them into a MySQL table, example in workbench", "loops through one and Updates the tables where needed. while", "str(gameData[a]['status']['period']) game_Status = str(gameData[a]['status']['type']['description']) home_Score = str(gameData[a]['competitions'][0]['competitors'][0]['score']) away_Score = str(gameData[a]['competitions'][0]['competitors'][1]['score'])", "inserted. print('----------------------------------------') print(str(updateCount) + ' GAMES UPDATED, and ' +", "WHERE Game_ID = %s' gameID = (str(allGames[c][0]),) mycursor.execute(query_string, gameID) if", "Away_Team = %s, Away_Score = %s, Home_Score = %s, Game_Date", "newGameCount+=1 query_string = \"INSERT INTO basketbet_data.all_games (Game_ID, Game_Name, Home_Team, Home_Odds,", "* FROM basketbet_data.all_games WHERE Game_Date = %s' gameDate = (str(team0OddsInfo[a][2]),)", "+ datetime.timedelta(days=-1) tomorrow = today + datetime.timedelta(days=1) #Removing the -", "Games every 5min | Odds every 6hr. counter=72 startTime =", "allGames[c][7], allGames[c][8], allGames[c][9], allGames[c][0]] query_string = 'UPDATE all_games SET Game_Name", "%s)' mycursor.execute(query_string, query_list) mydb.commit() else: newGameCount+=1 query_string = \"INSERT INTO", "ODDS-API. def oddsGetter(): #Parameters for Odds Api. 
parameters = {", "%s, Game_Status = %s WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list)", "%s WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list) b+=1 a+=1 #For", "Game_Date, Game_Time, Game_Period, Game_Status) VALUES (%s, %s, %s, 0, %s,", "allGames[c][5], allGames[c][3], allGames[c][6], allGames[c][7], allGames[c][8], allGames[c][9], allGames[c][0]] query_string = 'UPDATE", "team0_odds = 0 if team1_odds == '': team1_odds = 0", "site in game['sites']: if site['site_key'] == \"paddypower\": team0_odds = str(site['odds']['h2h'][0])", "while a < len(team0OddsInfo): query_string = 'SELECT * FROM basketbet_data.all_games", "WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list) mydb.commit() else: newGameCount+=1 query_string", "updating the table in MYSQL with the games. c=0 updateCount=0", "'') tomorrowShort = str(tomorrow).replace('-', '') yesterdayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + yesterdayShort", "= 'SELECT * FROM basketbet_data.all_games WHERE Game_Date = %s' gameDate", "str(gameData[a]['date'][11:-1]) game_Period = str(gameData[a]['status']['period']) game_Status = str(gameData[a]['status']['type']['description']) home_Score = str(gameData[a]['competitions'][0]['competitors'][0]['score'])", "user=\"\", password=\"\", database=\"basketbet_data\" ) mycursor = mydb.cursor() #Games List. 
allGames=[]", "< len(matchedGames): if matchedGames[b][2] == team0OddsInfo[a][0]: query_list = [team0OddsInfo[a][1], team1OddsInfo[a][1],", "ODDS-API, and then imports them into a MySQL table, example", "Angeles Clippers' else: away_Team = str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) #Appends the Game Data", "yesterdayShort + '-' + yesterdayShort todayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + todayShort", "requests.get(\"https://api.the-odds-api.com/v3/odds/\", params=parameters) data = response.json()['data'] team0OddsInfo=[] team1OddsInfo=[] team0_odds = ''", "matchedGames[b][0]] query_string = 'UPDATE all_games SET Away_Odds = %s, Home_Odds", "= str(game['teams'][0]) team1 = str(game['teams'][1]) startTime = game['commence_time'] gameDate =", "todayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + todayShort + '-' + todayShort tomorrowUrl", "to console what games were updated and what new games", "to the list. allGames.append((game_ID, game, home_Team, home_Score, away_Team, away_Score, game_Date,", "info to a list as strings. for game in data:", "print(str(updateCount) + ' GAMES UPDATED, and ' + str(newGameCount) +", "import json import datetime import time #Connection to the MYSQL", "json import datetime import time #Connection to the MYSQL Server.", "for game in data: for site in game['sites']: if site['site_key']", "where needed. 
while a < len(team0OddsInfo): query_string = 'SELECT *", "= today + datetime.timedelta(days=-1) tomorrow = today + datetime.timedelta(days=1) #Removing", "= str(gameData[a]['date'][:-7]) game_Time = str(gameData[a]['date'][11:-1]) game_Period = str(gameData[a]['status']['period']) game_Status =", "from the dates for the URLs, then making the URLs.", "yesterdayShort = str(yesterday).replace('-', '') tomorrowShort = str(tomorrow).replace('-', '') yesterdayUrl =", "= %s, Game_Time = %s, Game_Period = %s, Game_Status =", "def newGetter(gameDay): #Json Response for YESTERDAY. response = requests.get(gameDay).json() gameData", "= mydb.cursor() #Games List. allGames=[] #Gets the game Data from", "from the ODDS-API, and then imports them into a MySQL", "counter==72: oddsGetter() counter=0 else: counter+=1 print('\\n') time.sleep(300 - ((time.time() -", "= \"INSERT INTO basketbet_data.all_games (Game_ID, Game_Name, Home_Team, Home_Odds, Home_Score, Away_Team,", "print('----------------------------------------') allGames=[] #Counter for the Odds script. if counter==72: oddsGetter()", "game = str(gameData[a]['name']) game_ID = str(gameData[a]['id']) game_Date = str(gameData[a]['date'][:-7]) game_Time", "VALUES (%s, %s, %s, 0, %s, %s, 0, %s, %s,", "collect GameDay data. a=0 while a < len(gameData): game =", "= str(gameData[a]['name']) game_ID = str(gameData[a]['id']) game_Date = str(gameData[a]['date'][:-7]) game_Time =", "allGames.append((game_ID, game, home_Team, home_Score, away_Team, away_Score, game_Date, game_Time, game_Period, game_Status))", "the table in MYSQL with the games. c=0 updateCount=0 newGameCount=0", "#JSON Response. 
response = requests.get(\"https://api.the-odds-api.com/v3/odds/\", params=parameters) data = response.json()['data'] team0OddsInfo=[]", "= \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + todayShort + '-' + todayShort tomorrowUrl =", "= %s WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list) b+=1 a+=1", "Name from LA Clippers to Los Angeles Clippers. if str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName'])", "game in data: for site in game['sites']: if site['site_key'] ==", "games were updated and what new games were inserted. print('----------------------------------------')", "'Los Angeles Clippers' else: away_Team = str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) #Appends the Game", "team1_odds = '' #Appends the odds info to a list", "b=0 while b < len(matchedGames): if matchedGames[b][2] == team0OddsInfo[a][0]: query_list", "str(newGameCount) + ' NEW GAMES inserted.') print('----------------------------------------') allGames=[] #Counter for", "gameID = (str(allGames[c][0]),) mycursor.execute(query_string, gameID) if mycursor.fetchone(): updateCount+=1 query_list =", "%s WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list) mydb.commit() else: newGameCount+=1", "= str(tomorrow).replace('-', '') yesterdayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + yesterdayShort + '-'", "\"paddypower\": team0_odds = str(site['odds']['h2h'][0]) team1_odds = str(site['odds']['h2h'][1]) if team0_odds ==", "game_Period, game_Status)) a+=1 #Gets the Odds from the ODDS-API. def", "#Parameters for Odds Api. 
parameters = { \"sport\" : \"basketball_nba\",", "Clippers': home_Team = 'Los Angeles Clippers' else: home_Team = str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName'])", "Game_Name, Home_Team, Home_Odds, Home_Score, Away_Team, Away_Odds, Away_Score, Game_Date, Game_Time, Game_Period,", "\"sport\" : \"basketball_nba\", \"region\" : \"uk\", \"mkt\" : \"h2h\", \"apiKey\"", "home_Team = 'Los Angeles Clippers' else: home_Team = str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) if", "= requests.get(\"https://api.the-odds-api.com/v3/odds/\", params=parameters) data = response.json()['data'] team0OddsInfo=[] team1OddsInfo=[] team0_odds =", "response.headers['x-requests-remaining']) print('USED REQUESTS:', response.headers['x-requests-used']) print('--------------------------------') print('--------------------------------') #Block to keep the", ": \"\", } #JSON Response. response = requests.get(\"https://api.the-odds-api.com/v3/odds/\", params=parameters) data", "\"basketball_nba\", \"region\" : \"uk\", \"mkt\" : \"h2h\", \"apiKey\" : \"\",", "print('USED REQUESTS:', response.headers['x-requests-used']) print('--------------------------------') print('--------------------------------') #Block to keep the script", "%s)' mycursor.execute(query_string, query_list) elif matchedGames[b][5] == team0OddsInfo[a][0]: query_list = [team0OddsInfo[a][1],", "allGames[c][9], allGames[c][0]] query_string = 'UPDATE all_games SET Game_Name = %s,", "%s, %s, %s, %s)\" mycursor.execute(query_string, allGames[c]) mydb.commit() c+=1 #Prints to", "WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list) elif matchedGames[b][5] == team0OddsInfo[a][0]:", "keep the script running then sleep for time 300 with", "response[\"events\"] #Loop through to collect GameDay data. a=0 while a", "for site in game['sites']: if site['site_key'] == \"paddypower\": team0_odds =", "Response for YESTERDAY. 
response = requests.get(gameDay).json() gameData = response[\"events\"] #Loop", "newGameCount=0 while c < len(allGames): query_string = 'SELECT * FROM", "Game_ID = %s' gameID = (str(allGames[c][0]),) mycursor.execute(query_string, gameID) if mycursor.fetchone():", "Imports Game Data from ESPN, and Odds from the ODDS-API,", "'-' + yesterdayShort todayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + todayShort + '-'", "0 team0 = str(game['teams'][0]) team1 = str(game['teams'][1]) startTime = game['commence_time']", "matchedGames[b][5] == team0OddsInfo[a][0]: query_list = [team0OddsInfo[a][1], team1OddsInfo[a][1], matchedGames[b][0]] query_string =", "length, it loops through one and Updates the tables where", "matchedGames[b][2] == team0OddsInfo[a][0]: query_list = [team0OddsInfo[a][1], team1OddsInfo[a][1], matchedGames[b][0]] query_string =", "mydb.cursor() #Games List. allGames=[] #Gets the game Data from ESPN", "inserted.') print('----------------------------------------') allGames=[] #Counter for the Odds script. if counter==72:", "\"apiKey\" : \"\", } #JSON Response. response = requests.get(\"https://api.the-odds-api.com/v3/odds/\", params=parameters)", "\"\", } #JSON Response. 
response = requests.get(\"https://api.the-odds-api.com/v3/odds/\", params=parameters) data =", "requests import json import datetime import time #Connection to the", "yesterdayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + yesterdayShort + '-' + yesterdayShort todayUrl", "AT: ' + str(time)) print('--------------------------------') print('--------------------------------') print(len(team0OddsInfo), \"GAME ODDS inserted.\")", "b < len(matchedGames): if matchedGames[b][2] == team0OddsInfo[a][0]: query_list = [team0OddsInfo[a][1],", "+ datetime.timedelta(days=1) #Removing the - from the dates for the", "game_ID = str(gameData[a]['id']) game_Date = str(gameData[a]['date'][:-7]) game_Time = str(gameData[a]['date'][11:-1]) game_Period", "if matchedGames[b][2] == team0OddsInfo[a][0]: query_list = [team0OddsInfo[a][1], team1OddsInfo[a][1], matchedGames[b][0]] query_string", "%s, Away_Odds = %s WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list)", "gameDate = (str(team0OddsInfo[a][2]),) mycursor.execute(query_string, gameDate) matchedGames = mycursor.fetchall() b=0 while", "the - from the dates for the URLs, then making", "len(team0OddsInfo): query_string = 'SELECT * FROM basketbet_data.all_games WHERE Game_Date =", "WHERE Game_Date = %s' gameDate = (str(team0OddsInfo[a][2]),) mycursor.execute(query_string, gameDate) matchedGames", "= str(yesterday).replace('-', '') tomorrowShort = str(tomorrow).replace('-', '') yesterdayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\"", "UPDATED, and ' + str(newGameCount) + ' NEW GAMES inserted.')", "== '': team0_odds = 0 if team1_odds == '': team1_odds", "Game_Name = %s, Home_Team = %s, Away_Team = %s, Away_Score", "inserted.\") print('REMAINING REQUESTS:', response.headers['x-requests-remaining']) print('USED REQUESTS:', response.headers['x-requests-used']) print('--------------------------------') print('--------------------------------') 
#Block", "if counter==72: oddsGetter() counter=0 else: counter+=1 print('\\n') time.sleep(300 - ((time.time()", "= (str(allGames[c][0]),) mycursor.execute(query_string, gameID) if mycursor.fetchone(): updateCount+=1 query_list = [allGames[c][1],", "else: newGameCount+=1 query_string = \"INSERT INTO basketbet_data.all_games (Game_ID, Game_Name, Home_Team,", "= 'UPDATE all_games SET Home_Odds = %s, Away_Odds = %s", "Updates the tables where needed. while a < len(team0OddsInfo): query_string", "time 300 with counter set at 72 for Games every", "team0 = str(game['teams'][0]) team1 = str(game['teams'][1]) startTime = game['commence_time'] gameDate", "the same length, it loops through one and Updates the", "< len(allGames): query_string = 'SELECT * FROM basketbet_data.all_games WHERE Game_ID", "imports them into a MySQL table, example in workbench here", "= time.time() while True: #Today, Yesterday and Tomorrow. today =", "a MySQL table, example in workbench here https://puu.sh/HOKCj/ce199eec8e.png import mysql.connector", "team1_odds = str(site['odds']['h2h'][1]) if team0_odds == '': team0_odds = 0", "Tomorrow. today = datetime.date.today() yesterday = today + datetime.timedelta(days=-1) tomorrow", "data: for site in game['sites']: if site['site_key'] == \"paddypower\": team0_odds", "= %s)' mycursor.execute(query_string, query_list) mydb.commit() else: newGameCount+=1 query_string = \"INSERT", "mysql.connector import requests import json import datetime import time #Connection", "newGetter(todayUrl) newGetter(tomorrowUrl) #Inserting or updating the table in MYSQL with", "str(gameData[a]['name']) game_ID = str(gameData[a]['id']) game_Date = str(gameData[a]['date'][:-7]) game_Time = str(gameData[a]['date'][11:-1])", "to keep the script running then sleep for time 300", "and Updates the tables where needed. while a < len(team0OddsInfo):", "them into a MySQL table, example in workbench here https://puu.sh/HOKCj/ce199eec8e.png", "a+=1 #Gets the Odds from the ODDS-API. 
def oddsGetter(): #Parameters", "#Prints to console what games were updated and what new", "Game_Time, Game_Period, Game_Status) VALUES (%s, %s, %s, 0, %s, %s,", "print('--------------------------------') print(len(team0OddsInfo), \"GAME ODDS inserted.\") print('REMAINING REQUESTS:', response.headers['x-requests-remaining']) print('USED REQUESTS:',", "str(game['teams'][1]) startTime = game['commence_time'] gameDate = str(datetime.datetime.utcfromtimestamp(startTime).strftime('%Y-%m-%d %H:%M:%S'))[:-9] team0OddsInfo.append((team0, team0_odds,", "datetime.timedelta(days=-1) tomorrow = today + datetime.timedelta(days=1) #Removing the - from", "import requests import json import datetime import time #Connection to", "Home_Odds, Home_Score, Away_Team, Away_Odds, Away_Score, Game_Date, Game_Time, Game_Period, Game_Status) VALUES", "str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) == 'LA Clippers': home_Team = 'Los Angeles Clippers' else:", "counter=72 startTime = time.time() while True: #Today, Yesterday and Tomorrow.", "tomorrow = today + datetime.timedelta(days=1) #Removing the - from the", "yesterday = today + datetime.timedelta(days=-1) tomorrow = today + datetime.timedelta(days=1)", "'ODDS UPDATE AT: ' + str(time)) print('--------------------------------') print('--------------------------------') print(len(team0OddsInfo), \"GAME", "+ tomorrowShort newGetter(yesterdayUrl) newGetter(todayUrl) newGetter(tomorrowUrl) #Inserting or updating the table", "str(tomorrow).replace('-', '') yesterdayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + yesterdayShort + '-' +", "Response. 
response = requests.get(\"https://api.the-odds-api.com/v3/odds/\", params=parameters) data = response.json()['data'] team0OddsInfo=[] team1OddsInfo=[]", "gameDate)) a=0 #as both lists are the same length, it", "print('\\n' + 'ODDS UPDATE AT: ' + str(time)) print('--------------------------------') print('--------------------------------')", "Away_Team, Away_Odds, Away_Score, Game_Date, Game_Time, Game_Period, Game_Status) VALUES (%s, %s,", "query_string = 'UPDATE all_games SET Home_Odds = %s, Away_Odds =", "example in workbench here https://puu.sh/HOKCj/ce199eec8e.png import mysql.connector import requests import", "Home_Score = %s, Game_Date = %s, Game_Time = %s, Game_Period", "str(datetime.datetime.utcfromtimestamp(startTime).strftime('%Y-%m-%d %H:%M:%S'))[:-9] team0OddsInfo.append((team0, team0_odds, gameDate)) team1OddsInfo.append((team1, team1_odds, gameDate)) a=0 #as", "team1_odds, gameDate)) a=0 #as both lists are the same length,", "+ todayShort tomorrowUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + tomorrowShort + '-' +", "= %s, Game_Period = %s, Game_Status = %s WHERE (Game_ID", "in workbench here https://puu.sh/HOKCj/ce199eec8e.png import mysql.connector import requests import json", "= 'UPDATE all_games SET Away_Odds = %s, Home_Odds = %s", "= str(gameData[a]['date'][11:-1]) game_Period = str(gameData[a]['status']['period']) game_Status = str(gameData[a]['status']['type']['description']) home_Score =", "= '' team1_odds = '' #Appends the odds info to", "#as both lists are the same length, it loops through", "a=0 #as both lists are the same length, it loops", "str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) if str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) == 'LA Clippers': away_Team = 'Los Angeles", "%s)\" mycursor.execute(query_string, allGames[c]) mydb.commit() c+=1 #Prints to console what games", "allGames[c][4], allGames[c][5], allGames[c][3], 
allGames[c][6], allGames[c][7], allGames[c][8], allGames[c][9], allGames[c][0]] query_string =", "counter=0 else: counter+=1 print('\\n') time.sleep(300 - ((time.time() - startTime) %", "Home_Team = %s, Away_Team = %s, Away_Score = %s, Home_Score", "MYSQL with the games. c=0 updateCount=0 newGameCount=0 while c <", "= { \"sport\" : \"basketball_nba\", \"region\" : \"uk\", \"mkt\" :", "game['commence_time'] gameDate = str(datetime.datetime.utcfromtimestamp(startTime).strftime('%Y-%m-%d %H:%M:%S'))[:-9] team0OddsInfo.append((team0, team0_odds, gameDate)) team1OddsInfo.append((team1, team1_odds,", ": \"basketball_nba\", \"region\" : \"uk\", \"mkt\" : \"h2h\", \"apiKey\" :", "console what games were updated and what new games were", "== team0OddsInfo[a][0]: query_list = [team0OddsInfo[a][1], team1OddsInfo[a][1], matchedGames[b][0]] query_string = 'UPDATE", "= str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) if str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) == 'LA Clippers': away_Team = 'Los", "the games. c=0 updateCount=0 newGameCount=0 while c < len(allGames): query_string", "show when odds were updated. mydb.commit() time = datetime.datetime.utcnow() print('\\n'", "while True: #Today, Yesterday and Tomorrow. today = datetime.date.today() yesterday", "fix to change Clippers Name from LA Clippers to Los", "Data from ESPN API given the link. def newGetter(gameDay): #Json", "allGames[c][6], allGames[c][7], allGames[c][8], allGames[c][9], allGames[c][0]] query_string = 'UPDATE all_games SET", "'': team0_odds = 0 if team1_odds == '': team1_odds =", "Clippers': away_Team = 'Los Angeles Clippers' else: away_Team = str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName'])", "the game Data from ESPN API given the link. 
def", "a=0 while a < len(gameData): game = str(gameData[a]['name']) game_ID =", "away_Team, away_Score, game_Date, game_Time, game_Period, game_Status)) a+=1 #Gets the Odds", "query_string = 'SELECT * FROM basketbet_data.all_games WHERE Game_Date = %s'", "= game['commence_time'] gameDate = str(datetime.datetime.utcfromtimestamp(startTime).strftime('%Y-%m-%d %H:%M:%S'))[:-9] team0OddsInfo.append((team0, team0_odds, gameDate)) team1OddsInfo.append((team1,", "query_string = 'UPDATE all_games SET Away_Odds = %s, Home_Odds =", "Game_Status) VALUES (%s, %s, %s, 0, %s, %s, 0, %s,", "c+=1 #Prints to console what games were updated and what", "\"h2h\", \"apiKey\" : \"\", } #JSON Response. response = requests.get(\"https://api.the-odds-api.com/v3/odds/\",", "basketbet_data.all_games (Game_ID, Game_Name, Home_Team, Home_Odds, Home_Score, Away_Team, Away_Odds, Away_Score, Game_Date,", "str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) == 'LA Clippers': away_Team = 'Los Angeles Clippers' else:", "+ yesterdayShort + '-' + yesterdayShort todayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" +", "\"GAME ODDS inserted.\") print('REMAINING REQUESTS:', response.headers['x-requests-remaining']) print('USED REQUESTS:', response.headers['x-requests-used']) print('--------------------------------')", "= requests.get(gameDay).json() gameData = response[\"events\"] #Loop through to collect GameDay", "running then sleep for time 300 with counter set at", "at 72 for Games every 5min | Odds every 6hr.", "(str(team0OddsInfo[a][2]),) mycursor.execute(query_string, gameDate) matchedGames = mycursor.fetchall() b=0 while b <", "team0_odds == '': team0_odds = 0 if team1_odds == '':", "startTime = game['commence_time'] gameDate = str(datetime.datetime.utcfromtimestamp(startTime).strftime('%Y-%m-%d %H:%M:%S'))[:-9] team0OddsInfo.append((team0, team0_odds, gameDate))", "mydb.commit() time = datetime.datetime.utcnow() print('\\n' + 'ODDS 
UPDATE AT: '", "game_Time, game_Period, game_Status)) a+=1 #Gets the Odds from the ODDS-API.", "datetime.datetime.utcnow() print('\\n' + 'ODDS UPDATE AT: ' + str(time)) print('--------------------------------')", "for YESTERDAY. response = requests.get(gameDay).json() gameData = response[\"events\"] #Loop through", "allGames=[] #Counter for the Odds script. if counter==72: oddsGetter() counter=0", "what new games were inserted. print('----------------------------------------') print(str(updateCount) + ' GAMES", "Game Data from ESPN, and Odds from the ODDS-API, and", "today + datetime.timedelta(days=-1) tomorrow = today + datetime.timedelta(days=1) #Removing the", "' + str(time)) print('--------------------------------') print('--------------------------------') print(len(team0OddsInfo), \"GAME ODDS inserted.\") print('REMAINING", "home_Team = str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) if str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) == 'LA Clippers': away_Team =", "the MYSQL Server. mydb = mysql.connector.connect( host=\"\", user=\"\", password=\"\", database=\"basketbet_data\"", "%s, Away_Team = %s, Away_Score = %s, Home_Score = %s,", "%s' gameDate = (str(team0OddsInfo[a][2]),) mycursor.execute(query_string, gameDate) matchedGames = mycursor.fetchall() b=0", "the Game Data to the list. 
allGames.append((game_ID, game, home_Team, home_Score,", "%s, %s, 0, %s, %s, 0, %s, %s, %s, %s,", "c=0 updateCount=0 newGameCount=0 while c < len(allGames): query_string = 'SELECT", "mycursor.fetchone(): updateCount+=1 query_list = [allGames[c][1], allGames[c][2], allGames[c][4], allGames[c][5], allGames[c][3], allGames[c][6],", "away_Team = 'Los Angeles Clippers' else: away_Team = str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) #Appends", "away_Score, game_Date, game_Time, game_Period, game_Status)) a+=1 #Gets the Odds from", "str(gameData[a]['id']) game_Date = str(gameData[a]['date'][:-7]) game_Time = str(gameData[a]['date'][11:-1]) game_Period = str(gameData[a]['status']['period'])", "import time #Connection to the MYSQL Server. mydb = mysql.connector.connect(", "game_Date, game_Time, game_Period, game_Status)) a+=1 #Gets the Odds from the", "were inserted. print('----------------------------------------') print(str(updateCount) + ' GAMES UPDATED, and '", "= [team0OddsInfo[a][1], team1OddsInfo[a][1], matchedGames[b][0]] query_string = 'UPDATE all_games SET Home_Odds", "elif matchedGames[b][5] == team0OddsInfo[a][0]: query_list = [team0OddsInfo[a][1], team1OddsInfo[a][1], matchedGames[b][0]] query_string", "dates for the URLs, then making the URLs. todayShort =", "(Game_ID = %s)' mycursor.execute(query_string, query_list) mydb.commit() else: newGameCount+=1 query_string =", "#Appends the odds info to a list as strings. for", "FROM basketbet_data.all_games WHERE Game_Date = %s' gameDate = (str(team0OddsInfo[a][2]),) mycursor.execute(query_string,", "oddsGetter() counter=0 else: counter+=1 print('\\n') time.sleep(300 - ((time.time() - startTime)", "the link. def newGetter(gameDay): #Json Response for YESTERDAY. 
response =", "and then imports them into a MySQL table, example in", "str(today).replace('-', '') yesterdayShort = str(yesterday).replace('-', '') tomorrowShort = str(tomorrow).replace('-', '')", "host=\"\", user=\"\", password=\"\", database=\"basketbet_data\" ) mycursor = mydb.cursor() #Games List.", "allGames[c][2], allGames[c][4], allGames[c][5], allGames[c][3], allGames[c][6], allGames[c][7], allGames[c][8], allGames[c][9], allGames[c][0]] query_string", "in data: for site in game['sites']: if site['site_key'] == \"paddypower\":", "= str(gameData[a]['competitions'][0]['competitors'][0]['score']) away_Score = str(gameData[a]['competitions'][0]['competitors'][1]['score']) #Quick fix to change Clippers", "= \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + tomorrowShort + '-' + tomorrowShort newGetter(yesterdayUrl) newGetter(todayUrl)", "home_Score, away_Team, away_Score, game_Date, game_Time, game_Period, game_Status)) a+=1 #Gets the", "' NEW GAMES inserted.') print('----------------------------------------') allGames=[] #Counter for the Odds", "basketbet_data.all_games WHERE Game_ID = %s' gameID = (str(allGames[c][0]),) mycursor.execute(query_string, gameID)", "team0_odds = str(site['odds']['h2h'][0]) team1_odds = str(site['odds']['h2h'][1]) if team0_odds == '':", "game_Period = str(gameData[a]['status']['period']) game_Status = str(gameData[a]['status']['type']['description']) home_Score = str(gameData[a]['competitions'][0]['competitors'][0]['score']) away_Score", "List. allGames=[] #Gets the game Data from ESPN API given", "= response.json()['data'] team0OddsInfo=[] team1OddsInfo=[] team0_odds = '' team1_odds = ''", "%s, 0, %s, %s, 0, %s, %s, %s, %s, %s)\"", "here https://puu.sh/HOKCj/ce199eec8e.png import mysql.connector import requests import json import datetime", "Clippers. 
if str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) == 'LA Clippers': home_Team = 'Los Angeles", "gameDate = str(datetime.datetime.utcfromtimestamp(startTime).strftime('%Y-%m-%d %H:%M:%S'))[:-9] team0OddsInfo.append((team0, team0_odds, gameDate)) team1OddsInfo.append((team1, team1_odds, gameDate))", "every 5min | Odds every 6hr. counter=72 startTime = time.time()", "updated and what new games were inserted. print('----------------------------------------') print(str(updateCount) +", "= str(datetime.datetime.utcfromtimestamp(startTime).strftime('%Y-%m-%d %H:%M:%S'))[:-9] team0OddsInfo.append((team0, team0_odds, gameDate)) team1OddsInfo.append((team1, team1_odds, gameDate)) a=0", "gameDate) matchedGames = mycursor.fetchall() b=0 while b < len(matchedGames): if", "Clippers' else: home_Team = str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) if str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) == 'LA Clippers':", "game, home_Team, home_Score, away_Team, away_Score, game_Date, game_Time, game_Period, game_Status)) a+=1", "team0_odds = '' team1_odds = '' #Appends the odds info", "import mysql.connector import requests import json import datetime import time", "a < len(gameData): game = str(gameData[a]['name']) game_ID = str(gameData[a]['id']) game_Date", "to collect GameDay data. a=0 while a < len(gameData): game", "for Games every 5min | Odds every 6hr. counter=72 startTime", "print('REMAINING REQUESTS:', response.headers['x-requests-remaining']) print('USED REQUESTS:', response.headers['x-requests-used']) print('--------------------------------') print('--------------------------------') #Block to", "team0_odds, gameDate)) team1OddsInfo.append((team1, team1_odds, gameDate)) a=0 #as both lists are", "SET Game_Name = %s, Home_Team = %s, Away_Team = %s,", "'': team1_odds = 0 team0 = str(game['teams'][0]) team1 = str(game['teams'][1])", ") mycursor = mydb.cursor() #Games List. 
allGames=[] #Gets the game", "SET Home_Odds = %s, Away_Odds = %s WHERE (Game_ID =", "- from the dates for the URLs, then making the", "through one and Updates the tables where needed. while a", "Game_Period = %s, Game_Status = %s WHERE (Game_ID = %s)'", "== 'LA Clippers': away_Team = 'Los Angeles Clippers' else: away_Team", "same length, it loops through one and Updates the tables", "the list. allGames.append((game_ID, game, home_Team, home_Score, away_Team, away_Score, game_Date, game_Time,", "str(yesterday).replace('-', '') tomorrowShort = str(tomorrow).replace('-', '') yesterdayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" +", "[team0OddsInfo[a][1], team1OddsInfo[a][1], matchedGames[b][0]] query_string = 'UPDATE all_games SET Away_Odds =", "%s, %s)\" mycursor.execute(query_string, allGames[c]) mydb.commit() c+=1 #Prints to console what", "= today + datetime.timedelta(days=1) #Removing the - from the dates", "len(matchedGames): if matchedGames[b][2] == team0OddsInfo[a][0]: query_list = [team0OddsInfo[a][1], team1OddsInfo[a][1], matchedGames[b][0]]", "a list as strings. for game in data: for site", "the ODDS-API, and then imports them into a MySQL table,", "matchedGames = mycursor.fetchall() b=0 while b < len(matchedGames): if matchedGames[b][2]", "oddsGetter(): #Parameters for Odds Api. parameters = { \"sport\" :", "= 'SELECT * FROM basketbet_data.all_games WHERE Game_ID = %s' gameID", "set at 72 for Games every 5min | Odds every", "one and Updates the tables where needed. while a <", "team0OddsInfo[a][0]: query_list = [team0OddsInfo[a][1], team1OddsInfo[a][1], matchedGames[b][0]] query_string = 'UPDATE all_games", "the URLs, then making the URLs. todayShort = str(today).replace('-', '')", "= mycursor.fetchall() b=0 while b < len(matchedGames): if matchedGames[b][2] ==", "database=\"basketbet_data\" ) mycursor = mydb.cursor() #Games List. allGames=[] #Gets the", "#For the console to show when odds were updated. 
mydb.commit()", "with counter set at 72 for Games every 5min |", "allGames[c][3], allGames[c][6], allGames[c][7], allGames[c][8], allGames[c][9], allGames[c][0]] query_string = 'UPDATE all_games", "= %s, Away_Team = %s, Away_Score = %s, Home_Score =", "+ '-' + tomorrowShort newGetter(yesterdayUrl) newGetter(todayUrl) newGetter(tomorrowUrl) #Inserting or updating", "Game Data to the list. allGames.append((game_ID, game, home_Team, home_Score, away_Team,", "= '' #Appends the odds info to a list as", "a < len(team0OddsInfo): query_string = 'SELECT * FROM basketbet_data.all_games WHERE", "= datetime.datetime.utcnow() print('\\n' + 'ODDS UPDATE AT: ' + str(time))", "while a < len(gameData): game = str(gameData[a]['name']) game_ID = str(gameData[a]['id'])", "%s, Game_Time = %s, Game_Period = %s, Game_Status = %s", "mysql.connector.connect( host=\"\", user=\"\", password=\"\", database=\"basketbet_data\" ) mycursor = mydb.cursor() #Games", "it loops through one and Updates the tables where needed.", "#Counter for the Odds script. if counter==72: oddsGetter() counter=0 else:", "Angeles Clippers' else: home_Team = str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) if str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) == 'LA", "= %s, Away_Odds = %s WHERE (Game_ID = %s)' mycursor.execute(query_string,", "REQUESTS:', response.headers['x-requests-remaining']) print('USED REQUESTS:', response.headers['x-requests-used']) print('--------------------------------') print('--------------------------------') #Block to keep", "game['sites']: if site['site_key'] == \"paddypower\": team0_odds = str(site['odds']['h2h'][0]) team1_odds =", "#This script Imports Game Data from ESPN, and Odds from", "time #Connection to the MYSQL Server. 
mydb = mysql.connector.connect( host=\"\",", "= %s, Game_Status = %s WHERE (Game_ID = %s)' mycursor.execute(query_string,", "Game_Time = %s, Game_Period = %s, Game_Status = %s WHERE", "for the URLs, then making the URLs. todayShort = str(today).replace('-',", "allGames[c]) mydb.commit() c+=1 #Prints to console what games were updated", "script running then sleep for time 300 with counter set", "= 'Los Angeles Clippers' else: home_Team = str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) if str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName'])", "Data to the list. allGames.append((game_ID, game, home_Team, home_Score, away_Team, away_Score,", "todayShort = str(today).replace('-', '') yesterdayShort = str(yesterday).replace('-', '') tomorrowShort =", "%s, Home_Score = %s, Game_Date = %s, Game_Time = %s,", "mycursor.execute(query_string, query_list) elif matchedGames[b][5] == team0OddsInfo[a][0]: query_list = [team0OddsInfo[a][1], team1OddsInfo[a][1],", "Odds from the ODDS-API. def oddsGetter(): #Parameters for Odds Api.", "Data from ESPN, and Odds from the ODDS-API, and then", "ESPN API given the link. def newGetter(gameDay): #Json Response for", "Game_Period, Game_Status) VALUES (%s, %s, %s, 0, %s, %s, 0,", "< len(team0OddsInfo): query_string = 'SELECT * FROM basketbet_data.all_games WHERE Game_Date", "REQUESTS:', response.headers['x-requests-used']) print('--------------------------------') print('--------------------------------') #Block to keep the script running", "'LA Clippers': home_Team = 'Los Angeles Clippers' else: home_Team =", "to a list as strings. for game in data: for", "#Today, Yesterday and Tomorrow. today = datetime.date.today() yesterday = today", "= %s' gameID = (str(allGames[c][0]),) mycursor.execute(query_string, gameID) if mycursor.fetchone(): updateCount+=1", "Los Angeles Clippers. 
if str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) == 'LA Clippers': home_Team =", "b+=1 a+=1 #For the console to show when odds were", "6hr. counter=72 startTime = time.time() while True: #Today, Yesterday and", "= response[\"events\"] #Loop through to collect GameDay data. a=0 while", "mydb.commit() c+=1 #Prints to console what games were updated and", "Away_Score, Game_Date, Game_Time, Game_Period, Game_Status) VALUES (%s, %s, %s, 0,", "to change Clippers Name from LA Clippers to Los Angeles", "= str(gameData[a]['status']['period']) game_Status = str(gameData[a]['status']['type']['description']) home_Score = str(gameData[a]['competitions'][0]['competitors'][0]['score']) away_Score =", "query_list = [team0OddsInfo[a][1], team1OddsInfo[a][1], matchedGames[b][0]] query_string = 'UPDATE all_games SET", "mycursor = mydb.cursor() #Games List. allGames=[] #Gets the game Data", "+ tomorrowShort + '-' + tomorrowShort newGetter(yesterdayUrl) newGetter(todayUrl) newGetter(tomorrowUrl) #Inserting", "to Los Angeles Clippers. if str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) == 'LA Clippers': home_Team", "response.headers['x-requests-used']) print('--------------------------------') print('--------------------------------') #Block to keep the script running then", "mycursor.execute(query_string, query_list) b+=1 a+=1 #For the console to show when", "(Game_ID, Game_Name, Home_Team, Home_Odds, Home_Score, Away_Team, Away_Odds, Away_Score, Game_Date, Game_Time,", "#Removing the - from the dates for the URLs, then", "the URLs. 
todayShort = str(today).replace('-', '') yesterdayShort = str(yesterday).replace('-', '')", "print('--------------------------------') #Block to keep the script running then sleep for", "'') yesterdayShort = str(yesterday).replace('-', '') tomorrowShort = str(tomorrow).replace('-', '') yesterdayUrl", "parameters = { \"sport\" : \"basketball_nba\", \"region\" : \"uk\", \"mkt\"", "= str(gameData[a]['competitions'][0]['competitors'][1]['score']) #Quick fix to change Clippers Name from LA", "'SELECT * FROM basketbet_data.all_games WHERE Game_Date = %s' gameDate =", "allGames[c][8], allGames[c][9], allGames[c][0]] query_string = 'UPDATE all_games SET Game_Name =", "query_string = 'SELECT * FROM basketbet_data.all_games WHERE Game_ID = %s'", "console to show when odds were updated. mydb.commit() time =", "through to collect GameDay data. a=0 while a < len(gameData):", "'-' + todayShort tomorrowUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + tomorrowShort + '-'", "\"region\" : \"uk\", \"mkt\" : \"h2h\", \"apiKey\" : \"\", }", "True: #Today, Yesterday and Tomorrow. today = datetime.date.today() yesterday =", "mycursor.fetchall() b=0 while b < len(matchedGames): if matchedGames[b][2] == team0OddsInfo[a][0]:", "updateCount+=1 query_list = [allGames[c][1], allGames[c][2], allGames[c][4], allGames[c][5], allGames[c][3], allGames[c][6], allGames[c][7],", "'-' + tomorrowShort newGetter(yesterdayUrl) newGetter(todayUrl) newGetter(tomorrowUrl) #Inserting or updating the", "%s, Home_Odds = %s WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list)", "+ str(time)) print('--------------------------------') print('--------------------------------') print(len(team0OddsInfo), \"GAME ODDS inserted.\") print('REMAINING REQUESTS:',", "Yesterday and Tomorrow. 
today = datetime.date.today() yesterday = today +", "Home_Odds = %s, Away_Odds = %s WHERE (Game_ID = %s)'", "' GAMES UPDATED, and ' + str(newGameCount) + ' NEW", "+ str(newGameCount) + ' NEW GAMES inserted.') print('----------------------------------------') allGames=[] #Counter", "UPDATE AT: ' + str(time)) print('--------------------------------') print('--------------------------------') print(len(team0OddsInfo), \"GAME ODDS", "[team0OddsInfo[a][1], team1OddsInfo[a][1], matchedGames[b][0]] query_string = 'UPDATE all_games SET Home_Odds =", "team1 = str(game['teams'][1]) startTime = game['commence_time'] gameDate = str(datetime.datetime.utcfromtimestamp(startTime).strftime('%Y-%m-%d %H:%M:%S'))[:-9]", "== '': team1_odds = 0 team0 = str(game['teams'][0]) team1 =", "Away_Odds = %s, Home_Odds = %s WHERE (Game_ID = %s)'", "+ '-' + yesterdayShort todayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + todayShort +", "game_Time = str(gameData[a]['date'][11:-1]) game_Period = str(gameData[a]['status']['period']) game_Status = str(gameData[a]['status']['type']['description']) home_Score", "tomorrowShort = str(tomorrow).replace('-', '') yesterdayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + yesterdayShort +", "+ '-' + todayShort tomorrowUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + tomorrowShort +", "'UPDATE all_games SET Game_Name = %s, Home_Team = %s, Away_Team", "todayShort + '-' + todayShort tomorrowUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + tomorrowShort", "= %s, Home_Odds = %s WHERE (Game_ID = %s)' mycursor.execute(query_string,", "GAMES UPDATED, and ' + str(newGameCount) + ' NEW GAMES", "sleep for time 300 with counter set at 72 for", "newGetter(gameDay): #Json Response for YESTERDAY. 
response = requests.get(gameDay).json() gameData =", "query_string = \"INSERT INTO basketbet_data.all_games (Game_ID, Game_Name, Home_Team, Home_Odds, Home_Score,", "WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list) b+=1 a+=1 #For the", "* FROM basketbet_data.all_games WHERE Game_ID = %s' gameID = (str(allGames[c][0]),)", "else: home_Team = str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) if str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) == 'LA Clippers': away_Team", "time = datetime.datetime.utcnow() print('\\n' + 'ODDS UPDATE AT: ' +", "Clippers' else: away_Team = str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) #Appends the Game Data to", "#Connection to the MYSQL Server. mydb = mysql.connector.connect( host=\"\", user=\"\",", "+ yesterdayShort todayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + todayShort + '-' +", "gameID) if mycursor.fetchone(): updateCount+=1 query_list = [allGames[c][1], allGames[c][2], allGames[c][4], allGames[c][5],", "print('----------------------------------------') print(str(updateCount) + ' GAMES UPDATED, and ' + str(newGameCount)", "} #JSON Response. 
response = requests.get(\"https://api.the-odds-api.com/v3/odds/\", params=parameters) data = response.json()['data']", "Home_Odds = %s WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list) b+=1", "= str(site['odds']['h2h'][1]) if team0_odds == '': team0_odds = 0 if", "team1_odds == '': team1_odds = 0 team0 = str(game['teams'][0]) team1", "{ \"sport\" : \"basketball_nba\", \"region\" : \"uk\", \"mkt\" : \"h2h\",", "data = response.json()['data'] team0OddsInfo=[] team1OddsInfo=[] team0_odds = '' team1_odds =", "if mycursor.fetchone(): updateCount+=1 query_list = [allGames[c][1], allGames[c][2], allGames[c][4], allGames[c][5], allGames[c][3],", "= 'UPDATE all_games SET Game_Name = %s, Home_Team = %s,", "all_games SET Game_Name = %s, Home_Team = %s, Away_Team =", "script Imports Game Data from ESPN, and Odds from the", "\"uk\", \"mkt\" : \"h2h\", \"apiKey\" : \"\", } #JSON Response.", ": \"uk\", \"mkt\" : \"h2h\", \"apiKey\" : \"\", } #JSON", "= %s)' mycursor.execute(query_string, query_list) elif matchedGames[b][5] == team0OddsInfo[a][0]: query_list =", "(str(allGames[c][0]),) mycursor.execute(query_string, gameID) if mycursor.fetchone(): updateCount+=1 query_list = [allGames[c][1], allGames[c][2],", "Server. mydb = mysql.connector.connect( host=\"\", user=\"\", password=\"\", database=\"basketbet_data\" ) mycursor", "when odds were updated. mydb.commit() time = datetime.datetime.utcnow() print('\\n' +", "the tables where needed. while a < len(team0OddsInfo): query_string =", "= str(gameData[a]['status']['type']['description']) home_Score = str(gameData[a]['competitions'][0]['competitors'][0]['score']) away_Score = str(gameData[a]['competitions'][0]['competitors'][1]['score']) #Quick fix", "if team1_odds == '': team1_odds = 0 team0 = str(game['teams'][0])", "the ODDS-API. def oddsGetter(): #Parameters for Odds Api. parameters =", "| Odds every 6hr. counter=72 startTime = time.time() while True:", "and what new games were inserted. 
print('----------------------------------------') print(str(updateCount) + '", "5min | Odds every 6hr. counter=72 startTime = time.time() while", "else: away_Team = str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) #Appends the Game Data to the", "then sleep for time 300 with counter set at 72", "all_games SET Away_Odds = %s, Home_Odds = %s WHERE (Game_ID", "tomorrowShort + '-' + tomorrowShort newGetter(yesterdayUrl) newGetter(todayUrl) newGetter(tomorrowUrl) #Inserting or", "workbench here https://puu.sh/HOKCj/ce199eec8e.png import mysql.connector import requests import json import", "from ESPN, and Odds from the ODDS-API, and then imports", "Api. parameters = { \"sport\" : \"basketball_nba\", \"region\" : \"uk\",", "Angeles Clippers. if str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) == 'LA Clippers': home_Team = 'Los", "'' #Appends the odds info to a list as strings.", "needed. while a < len(team0OddsInfo): query_string = 'SELECT * FROM", "matchedGames[b][0]] query_string = 'UPDATE all_games SET Home_Odds = %s, Away_Odds", "home_Score = str(gameData[a]['competitions'][0]['competitors'][0]['score']) away_Score = str(gameData[a]['competitions'][0]['competitors'][1]['score']) #Quick fix to change", "Odds script. if counter==72: oddsGetter() counter=0 else: counter+=1 print('\\n') time.sleep(300", "(Game_ID = %s)' mycursor.execute(query_string, query_list) elif matchedGames[b][5] == team0OddsInfo[a][0]: query_list", "to the MYSQL Server. 
mydb = mysql.connector.connect( host=\"\", user=\"\", password=\"\",", "= %s, Home_Team = %s, Away_Team = %s, Away_Score =", "= %s, Away_Score = %s, Home_Score = %s, Game_Date =", "%s, Away_Score = %s, Home_Score = %s, Game_Date = %s,", "if str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) == 'LA Clippers': away_Team = 'Los Angeles Clippers'", "NEW GAMES inserted.') print('----------------------------------------') allGames=[] #Counter for the Odds script.", "tomorrowShort newGetter(yesterdayUrl) newGetter(todayUrl) newGetter(tomorrowUrl) #Inserting or updating the table in", "making the URLs. todayShort = str(today).replace('-', '') yesterdayShort = str(yesterday).replace('-',", "requests.get(gameDay).json() gameData = response[\"events\"] #Loop through to collect GameDay data.", "tables where needed. while a < len(team0OddsInfo): query_string = 'SELECT", "def oddsGetter(): #Parameters for Odds Api. parameters = { \"sport\"", "all_games SET Home_Odds = %s, Away_Odds = %s WHERE (Game_ID", "print(len(team0OddsInfo), \"GAME ODDS inserted.\") print('REMAINING REQUESTS:', response.headers['x-requests-remaining']) print('USED REQUESTS:', response.headers['x-requests-used'])", "strings. 
for game in data: for site in game['sites']: if", "= mysql.connector.connect( host=\"\", user=\"\", password=\"\", database=\"basketbet_data\" ) mycursor = mydb.cursor()", "allGames=[] #Gets the game Data from ESPN API given the", "table, example in workbench here https://puu.sh/HOKCj/ce199eec8e.png import mysql.connector import requests", "updateCount=0 newGameCount=0 while c < len(allGames): query_string = 'SELECT *", "change Clippers Name from LA Clippers to Los Angeles Clippers.", "'UPDATE all_games SET Home_Odds = %s, Away_Odds = %s WHERE", "%s, 0, %s, %s, %s, %s, %s)\" mycursor.execute(query_string, allGames[c]) mydb.commit()", "ESPN, and Odds from the ODDS-API, and then imports them", "= [team0OddsInfo[a][1], team1OddsInfo[a][1], matchedGames[b][0]] query_string = 'UPDATE all_games SET Away_Odds", "newGetter(tomorrowUrl) #Inserting or updating the table in MYSQL with the", "and Odds from the ODDS-API, and then imports them into", "str(gameData[a]['competitions'][0]['competitors'][0]['score']) away_Score = str(gameData[a]['competitions'][0]['competitors'][1]['score']) #Quick fix to change Clippers Name", "the Odds from the ODDS-API. def oddsGetter(): #Parameters for Odds", "= %s, Game_Date = %s, Game_Time = %s, Game_Period =", "time.time() while True: #Today, Yesterday and Tomorrow. today = datetime.date.today()", "= \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + yesterdayShort + '-' + yesterdayShort todayUrl =", "were updated and what new games were inserted. print('----------------------------------------') print(str(updateCount)", "games. 
c=0 updateCount=0 newGameCount=0 while c < len(allGames): query_string =", "%s, Game_Date = %s, Game_Time = %s, Game_Period = %s,", "= %s' gameDate = (str(team0OddsInfo[a][2]),) mycursor.execute(query_string, gameDate) matchedGames = mycursor.fetchall()", "ODDS inserted.\") print('REMAINING REQUESTS:', response.headers['x-requests-remaining']) print('USED REQUESTS:', response.headers['x-requests-used']) print('--------------------------------') print('--------------------------------')", "both lists are the same length, it loops through one", "datetime import time #Connection to the MYSQL Server. mydb =", "basketbet_data.all_games WHERE Game_Date = %s' gameDate = (str(team0OddsInfo[a][2]),) mycursor.execute(query_string, gameDate)", "MySQL table, example in workbench here https://puu.sh/HOKCj/ce199eec8e.png import mysql.connector import", "in MYSQL with the games. c=0 updateCount=0 newGameCount=0 while c", "'LA Clippers': away_Team = 'Los Angeles Clippers' else: away_Team =", "= %s WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list) elif matchedGames[b][5]", "gameData = response[\"events\"] #Loop through to collect GameDay data. a=0", "== \"paddypower\": team0_odds = str(site['odds']['h2h'][0]) team1_odds = str(site['odds']['h2h'][1]) if team0_odds", "%s, %s, 0, %s, %s, %s, %s, %s)\" mycursor.execute(query_string, allGames[c])", "#Appends the Game Data to the list. allGames.append((game_ID, game, home_Team,", "datetime.timedelta(days=1) #Removing the - from the dates for the URLs,", "' + str(newGameCount) + ' NEW GAMES inserted.') print('----------------------------------------') allGames=[]", "Away_Score = %s, Home_Score = %s, Game_Date = %s, Game_Time", "< len(gameData): game = str(gameData[a]['name']) game_ID = str(gameData[a]['id']) game_Date =", "or updating the table in MYSQL with the games. 
c=0", "home_Team, home_Score, away_Team, away_Score, game_Date, game_Time, game_Period, game_Status)) a+=1 #Gets", "counter set at 72 for Games every 5min | Odds", "gameDate)) team1OddsInfo.append((team1, team1_odds, gameDate)) a=0 #as both lists are the", "today = datetime.date.today() yesterday = today + datetime.timedelta(days=-1) tomorrow =", "= 0 if team1_odds == '': team1_odds = 0 team0", "yesterdayShort todayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + todayShort + '-' + todayShort", "the console to show when odds were updated. mydb.commit() time", "datetime.date.today() yesterday = today + datetime.timedelta(days=-1) tomorrow = today +", "Home_Team, Home_Odds, Home_Score, Away_Team, Away_Odds, Away_Score, Game_Date, Game_Time, Game_Period, Game_Status)", "Odds from the ODDS-API, and then imports them into a", "str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) #Appends the Game Data to the list. allGames.append((game_ID, game,", "the dates for the URLs, then making the URLs. todayShort", "Away_Odds, Away_Score, Game_Date, Game_Time, Game_Period, Game_Status) VALUES (%s, %s, %s,", "\"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + todayShort + '-' + todayShort tomorrowUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\"", "= [allGames[c][1], allGames[c][2], allGames[c][4], allGames[c][5], allGames[c][3], allGames[c][6], allGames[c][7], allGames[c][8], allGames[c][9],", "FROM basketbet_data.all_games WHERE Game_ID = %s' gameID = (str(allGames[c][0]),) mycursor.execute(query_string,", "%s)' mycursor.execute(query_string, query_list) b+=1 a+=1 #For the console to show", "mydb.commit() else: newGameCount+=1 query_string = \"INSERT INTO basketbet_data.all_games (Game_ID, Game_Name,", "%s, %s, %s)\" mycursor.execute(query_string, allGames[c]) mydb.commit() c+=1 #Prints to console", "to show when odds were updated. 
mydb.commit() time = datetime.datetime.utcnow()", "Game_Date = %s' gameDate = (str(team0OddsInfo[a][2]),) mycursor.execute(query_string, gameDate) matchedGames =", "given the link. def newGetter(gameDay): #Json Response for YESTERDAY. response", "= str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) #Appends the Game Data to the list. allGames.append((game_ID,", "str(site['odds']['h2h'][0]) team1_odds = str(site['odds']['h2h'][1]) if team0_odds == '': team0_odds =", "#Games List. allGames=[] #Gets the game Data from ESPN API", "'SELECT * FROM basketbet_data.all_games WHERE Game_ID = %s' gameID =", "72 for Games every 5min | Odds every 6hr. counter=72", "len(gameData): game = str(gameData[a]['name']) game_ID = str(gameData[a]['id']) game_Date = str(gameData[a]['date'][:-7])", "if team0_odds == '': team0_odds = 0 if team1_odds ==", "https://puu.sh/HOKCj/ce199eec8e.png import mysql.connector import requests import json import datetime import", "+ ' GAMES UPDATED, and ' + str(newGameCount) + '", "#Inserting or updating the table in MYSQL with the games.", "response.json()['data'] team0OddsInfo=[] team1OddsInfo=[] team0_odds = '' team1_odds = '' #Appends", "Odds every 6hr. counter=72 startTime = time.time() while True: #Today,", "#Block to keep the script running then sleep for time", "= %s)' mycursor.execute(query_string, query_list) b+=1 a+=1 #For the console to", "query_string = 'UPDATE all_games SET Game_Name = %s, Home_Team =", "games were inserted. 
print('----------------------------------------') print(str(updateCount) + ' GAMES UPDATED, and", "Away_Odds = %s WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list) elif", "are the same length, it loops through one and Updates", "what games were updated and what new games were inserted.", "(%s, %s, %s, 0, %s, %s, 0, %s, %s, %s,", "print('--------------------------------') print('--------------------------------') print(len(team0OddsInfo), \"GAME ODDS inserted.\") print('REMAINING REQUESTS:', response.headers['x-requests-remaining']) print('USED", "todayShort tomorrowUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + tomorrowShort + '-' + tomorrowShort", "with the games. c=0 updateCount=0 newGameCount=0 while c < len(allGames):", "str(gameData[a]['competitions'][0]['competitors'][1]['score']) #Quick fix to change Clippers Name from LA Clippers", "data. a=0 while a < len(gameData): game = str(gameData[a]['name']) game_ID", "[allGames[c][1], allGames[c][2], allGames[c][4], allGames[c][5], allGames[c][3], allGames[c][6], allGames[c][7], allGames[c][8], allGames[c][9], allGames[c][0]]", "GAMES inserted.') print('----------------------------------------') allGames=[] #Counter for the Odds script. if", "the odds info to a list as strings. for game", "0 if team1_odds == '': team1_odds = 0 team0 =", "response = requests.get(gameDay).json() gameData = response[\"events\"] #Loop through to collect", "site['site_key'] == \"paddypower\": team0_odds = str(site['odds']['h2h'][0]) team1_odds = str(site['odds']['h2h'][1]) if", "away_Team = str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) #Appends the Game Data to the list.", "updated. 
mydb.commit() time = datetime.datetime.utcnow() print('\\n' + 'ODDS UPDATE AT:", "query_list = [allGames[c][1], allGames[c][2], allGames[c][4], allGames[c][5], allGames[c][3], allGames[c][6], allGames[c][7], allGames[c][8],", "'Los Angeles Clippers' else: home_Team = str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) if str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) ==", "team1OddsInfo.append((team1, team1_odds, gameDate)) a=0 #as both lists are the same", "the Odds script. if counter==72: oddsGetter() counter=0 else: counter+=1 print('\\n')", "list as strings. for game in data: for site in", "= datetime.date.today() yesterday = today + datetime.timedelta(days=-1) tomorrow = today", "str(time)) print('--------------------------------') print('--------------------------------') print(len(team0OddsInfo), \"GAME ODDS inserted.\") print('REMAINING REQUESTS:', response.headers['x-requests-remaining'])", "= %s, Home_Score = %s, Game_Date = %s, Game_Time =", "%s, %s, %s, %s, %s)\" mycursor.execute(query_string, allGames[c]) mydb.commit() c+=1 #Prints", "while c < len(allGames): query_string = 'SELECT * FROM basketbet_data.all_games", "API given the link. def newGetter(gameDay): #Json Response for YESTERDAY.", "team1_odds = 0 team0 = str(game['teams'][0]) team1 = str(game['teams'][1]) startTime", "today + datetime.timedelta(days=1) #Removing the - from the dates for", "response = requests.get(\"https://api.the-odds-api.com/v3/odds/\", params=parameters) data = response.json()['data'] team0OddsInfo=[] team1OddsInfo=[] team0_odds", "== 'LA Clippers': home_Team = 'Los Angeles Clippers' else: home_Team", "query_list) b+=1 a+=1 #For the console to show when odds", "300 with counter set at 72 for Games every 5min", "GameDay data. a=0 while a < len(gameData): game = str(gameData[a]['name'])", ": \"h2h\", \"apiKey\" : \"\", } #JSON Response. response =", "new games were inserted. 
print('----------------------------------------') print(str(updateCount) + ' GAMES UPDATED,", "allGames[c][0]] query_string = 'UPDATE all_games SET Game_Name = %s, Home_Team", "+ ' NEW GAMES inserted.') print('----------------------------------------') allGames=[] #Counter for the", "for time 300 with counter set at 72 for Games", "team1OddsInfo[a][1], matchedGames[b][0]] query_string = 'UPDATE all_games SET Away_Odds = %s,", "= str(today).replace('-', '') yesterdayShort = str(yesterday).replace('-', '') tomorrowShort = str(tomorrow).replace('-',", "0, %s, %s, %s, %s, %s)\" mycursor.execute(query_string, allGames[c]) mydb.commit() c+=1", "team0OddsInfo.append((team0, team0_odds, gameDate)) team1OddsInfo.append((team1, team1_odds, gameDate)) a=0 #as both lists", "link. def newGetter(gameDay): #Json Response for YESTERDAY. response = requests.get(gameDay).json()", "startTime = time.time() while True: #Today, Yesterday and Tomorrow. today", "newGetter(yesterdayUrl) newGetter(todayUrl) newGetter(tomorrowUrl) #Inserting or updating the table in MYSQL", "from ESPN API given the link. def newGetter(gameDay): #Json Response", "%s, Game_Period = %s, Game_Status = %s WHERE (Game_ID =", "= str(game['teams'][1]) startTime = game['commence_time'] gameDate = str(datetime.datetime.utcfromtimestamp(startTime).strftime('%Y-%m-%d %H:%M:%S'))[:-9] team0OddsInfo.append((team0,", "params=parameters) data = response.json()['data'] team0OddsInfo=[] team1OddsInfo=[] team0_odds = '' team1_odds", "odds info to a list as strings. for game in", "and Tomorrow. today = datetime.date.today() yesterday = today + datetime.timedelta(days=-1)", "YESTERDAY. response = requests.get(gameDay).json() gameData = response[\"events\"] #Loop through to", "game_Status)) a+=1 #Gets the Odds from the ODDS-API. 
def oddsGetter():", "= str(gameData[a]['id']) game_Date = str(gameData[a]['date'][:-7]) game_Time = str(gameData[a]['date'][11:-1]) game_Period =", "game_Date = str(gameData[a]['date'][:-7]) game_Time = str(gameData[a]['date'][11:-1]) game_Period = str(gameData[a]['status']['period']) game_Status", "team0OddsInfo=[] team1OddsInfo=[] team0_odds = '' team1_odds = '' #Appends the", "%s, Home_Team = %s, Away_Team = %s, Away_Score = %s,", "= (str(team0OddsInfo[a][2]),) mycursor.execute(query_string, gameDate) matchedGames = mycursor.fetchall() b=0 while b", "Clippers to Los Angeles Clippers. if str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) == 'LA Clippers':", "game Data from ESPN API given the link. def newGetter(gameDay):", "INTO basketbet_data.all_games (Game_ID, Game_Name, Home_Team, Home_Odds, Home_Score, Away_Team, Away_Odds, Away_Score,", "and ' + str(newGameCount) + ' NEW GAMES inserted.') print('----------------------------------------')", "0, %s, %s, 0, %s, %s, %s, %s, %s)\" mycursor.execute(query_string,", "from LA Clippers to Los Angeles Clippers. if str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) ==", "c < len(allGames): query_string = 'SELECT * FROM basketbet_data.all_games WHERE", "= str(site['odds']['h2h'][0]) team1_odds = str(site['odds']['h2h'][1]) if team0_odds == '': team0_odds", "mycursor.execute(query_string, allGames[c]) mydb.commit() c+=1 #Prints to console what games were", "script. if counter==72: oddsGetter() counter=0 else: counter+=1 print('\\n') time.sleep(300 -", "into a MySQL table, example in workbench here https://puu.sh/HOKCj/ce199eec8e.png import", "team1OddsInfo=[] team0_odds = '' team1_odds = '' #Appends the odds", "= %s WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list) mydb.commit() else:", "mydb = mysql.connector.connect( host=\"\", user=\"\", password=\"\", database=\"basketbet_data\" ) mycursor =", "Odds Api. 
parameters = { \"sport\" : \"basketball_nba\", \"region\" :", "%H:%M:%S'))[:-9] team0OddsInfo.append((team0, team0_odds, gameDate)) team1OddsInfo.append((team1, team1_odds, gameDate)) a=0 #as both", "+ 'ODDS UPDATE AT: ' + str(time)) print('--------------------------------') print('--------------------------------') print(len(team0OddsInfo),", "mycursor.execute(query_string, query_list) mydb.commit() else: newGameCount+=1 query_string = \"INSERT INTO basketbet_data.all_games", "#Gets the Odds from the ODDS-API. def oddsGetter(): #Parameters for", "'UPDATE all_games SET Away_Odds = %s, Home_Odds = %s WHERE", "if site['site_key'] == \"paddypower\": team0_odds = str(site['odds']['h2h'][0]) team1_odds = str(site['odds']['h2h'][1])", "game_Status = str(gameData[a]['status']['type']['description']) home_Score = str(gameData[a]['competitions'][0]['competitors'][0]['score']) away_Score = str(gameData[a]['competitions'][0]['competitors'][1]['score']) #Quick", "#Quick fix to change Clippers Name from LA Clippers to", "MYSQL Server. mydb = mysql.connector.connect( host=\"\", user=\"\", password=\"\", database=\"basketbet_data\" )", "table in MYSQL with the games. c=0 updateCount=0 newGameCount=0 while", "query_list) mydb.commit() else: newGameCount+=1 query_string = \"INSERT INTO basketbet_data.all_games (Game_ID,", "odds were updated. mydb.commit() time = datetime.datetime.utcnow() print('\\n' + 'ODDS", "'' team1_odds = '' #Appends the odds info to a", "URLs, then making the URLs. todayShort = str(today).replace('-', '') yesterdayShort", "#Json Response for YESTERDAY. response = requests.get(gameDay).json() gameData = response[\"events\"]", "str(game['teams'][0]) team1 = str(game['teams'][1]) startTime = game['commence_time'] gameDate = str(datetime.datetime.utcfromtimestamp(startTime).strftime('%Y-%m-%d", "for the Odds script. if counter==72: oddsGetter() counter=0 else: counter+=1", "were updated. 
mydb.commit() time = datetime.datetime.utcnow() print('\\n' + 'ODDS UPDATE", "+ todayShort + '-' + todayShort tomorrowUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" +", "\"mkt\" : \"h2h\", \"apiKey\" : \"\", } #JSON Response. response", "#Gets the game Data from ESPN API given the link.", "= 0 team0 = str(game['teams'][0]) team1 = str(game['teams'][1]) startTime =", "Home_Score, Away_Team, Away_Odds, Away_Score, Game_Date, Game_Time, Game_Period, Game_Status) VALUES (%s,", "while b < len(matchedGames): if matchedGames[b][2] == team0OddsInfo[a][0]: query_list =", "if str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) == 'LA Clippers': home_Team = 'Los Angeles Clippers'", "else: counter+=1 print('\\n') time.sleep(300 - ((time.time() - startTime) % 300))", "print('--------------------------------') print('--------------------------------') #Block to keep the script running then sleep", "%s WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list) elif matchedGames[b][5] ==", "password=\"\", database=\"basketbet_data\" ) mycursor = mydb.cursor() #Games List. allGames=[] #Gets", "team1OddsInfo[a][1], matchedGames[b][0]] query_string = 'UPDATE all_games SET Home_Odds = %s,", "in game['sites']: if site['site_key'] == \"paddypower\": team0_odds = str(site['odds']['h2h'][0]) team1_odds", "mycursor.execute(query_string, gameID) if mycursor.fetchone(): updateCount+=1 query_list = [allGames[c][1], allGames[c][2], allGames[c][4],", "Game_Status = %s WHERE (Game_ID = %s)' mycursor.execute(query_string, query_list) mydb.commit()", "then making the URLs. todayShort = str(today).replace('-', '') yesterdayShort =", "the script running then sleep for time 300 with counter", "as strings. 
for game in data: for site in game['sites']:", "\"INSERT INTO basketbet_data.all_games (Game_ID, Game_Name, Home_Team, Home_Odds, Home_Score, Away_Team, Away_Odds,", "mycursor.execute(query_string, gameDate) matchedGames = mycursor.fetchall() b=0 while b < len(matchedGames):", "tomorrowUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + tomorrowShort + '-' + tomorrowShort newGetter(yesterdayUrl)", "LA Clippers to Los Angeles Clippers. if str(gameData[a]['competitions'][0]['competitors'][0]['team']['displayName']) == 'LA", "str(site['odds']['h2h'][1]) if team0_odds == '': team0_odds = 0 if team1_odds", "list. allGames.append((game_ID, game, home_Team, home_Score, away_Team, away_Score, game_Date, game_Time, game_Period,", "a+=1 #For the console to show when odds were updated.", "from the ODDS-API. def oddsGetter(): #Parameters for Odds Api. parameters", "\"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + yesterdayShort + '-' + yesterdayShort todayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\"", "away_Score = str(gameData[a]['competitions'][0]['competitors'][1]['score']) #Quick fix to change Clippers Name from", "SET Away_Odds = %s, Home_Odds = %s WHERE (Game_ID =", "lists are the same length, it loops through one and", "Game_Date = %s, Game_Time = %s, Game_Period = %s, Game_Status", "str(gameData[a]['status']['type']['description']) home_Score = str(gameData[a]['competitions'][0]['competitors'][0]['score']) away_Score = str(gameData[a]['competitions'][0]['competitors'][1]['score']) #Quick fix to", "'') yesterdayUrl = \"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + yesterdayShort + '-' + yesterdayShort", "Clippers Name from LA Clippers to Los Angeles Clippers. 
if", "(Game_ID = %s)' mycursor.execute(query_string, query_list) b+=1 a+=1 #For the console", "str(gameData[a]['date'][:-7]) game_Time = str(gameData[a]['date'][11:-1]) game_Period = str(gameData[a]['status']['period']) game_Status = str(gameData[a]['status']['type']['description'])", "URLs. todayShort = str(today).replace('-', '') yesterdayShort = str(yesterday).replace('-', '') tomorrowShort", "for Odds Api. parameters = { \"sport\" : \"basketball_nba\", \"region\"", "%s' gameID = (str(allGames[c][0]),) mycursor.execute(query_string, gameID) if mycursor.fetchone(): updateCount+=1 query_list", "every 6hr. counter=72 startTime = time.time() while True: #Today, Yesterday", "= 'Los Angeles Clippers' else: away_Team = str(gameData[a]['competitions'][0]['competitors'][1]['team']['displayName']) #Appends the", "len(allGames): query_string = 'SELECT * FROM basketbet_data.all_games WHERE Game_ID =", "\"http://site.api.espn.com/apis/site/v2/sports/basketball/nba/scoreboard?dates=\" + tomorrowShort + '-' + tomorrowShort newGetter(yesterdayUrl) newGetter(todayUrl) newGetter(tomorrowUrl)", "import datetime import time #Connection to the MYSQL Server. mydb", "#Loop through to collect GameDay data. a=0 while a <" ]
[ "test_generate(): args = (\"generate -b ubuntu:17.04 -p apt\" \" --arg", "\"/tmp/file.txt\"]' in out main(args.format('--copy path/to/file.txt /tmp/file.txt').split()) out, _ = capsys.readouterr()", "in out main(args.format('--workdir /home').split()) out, _ = capsys.readouterr() assert \"WORKDIR", "['generate', '-b', 'ubuntu:17.04', '-p', 'apt', '--no-check-urls'] main(args) out, _ =", "\"generate -b ubuntu:17.04 -p apt --no-check-urls {}\" main(args.format('--user=neuro').split()) out, _", "def test_generate_from_json(capsys, tmpdir): import json cmd = \"generate -b debian:stretch", "'-p', 'apt', '--no-check-urls'] main(args) out, _ = capsys.readouterr() assert \"FROM\"", "import pytest from neurodocker.neurodocker import create_parser, parse_args, main def test_generate():", "= (\"generate -b ubuntu:17.04 -p apt\" \" --arg FOO=BAR BAZ\"", "ubuntu\" main(args.split()) with pytest.raises(SystemExit): args = \"-p apt\" main(args.split()) with", "= slice(8, -19) assert true.split('\\n')[sl] == test.split('\\n')[sl] def test_generate_no_print(capsys): args", "= json.dumps(specs) filepath = tmpdir.join(\"specs.json\") filepath.write(str_specs) gen_cmd = \"generate --file", "out def test_generate_from_json(capsys, tmpdir): import json cmd = \"generate -b", "main(args.split()) def test_generate_opts(capsys): args = \"generate -b ubuntu:17.04 -p apt", "--add relpath/to/file2.txt /tmp/file2.txt\" \" --cmd '--arg1' '--arg2'\" \" --workdir /home\"", "the layer that # saves to JSON (with timestamp). 
sl", "[\"path/to/file.txt\", \"/tmp/file.txt\"]' in out main(args.format('--env KEY=VAL KEY2=VAL').split()) out, _ =", "capsys.readouterr() # These indices chop off the header (with timestamp)", "'apt', '--no-check-urls'] main(args) out, _ = capsys.readouterr() assert \"FROM\" in", "def test_generate_no_print(capsys): args = ['generate', '-b', 'ubuntu:17.04', '-p', 'apt', '--no-check-urls']", "--fsl version=5.0.10\" \" --user=neuro\" \" --miniconda env_name=neuro conda_install=python=3.6.2\" \" --user=root\"", "in out def test_generate_from_json(capsys, tmpdir): import json cmd = \"generate", "' KEY2=\"VAL\"' in out main(args.format('--expose 1230 1231').split()) out, _ =", "test_generate_from_json(capsys, tmpdir): import json cmd = \"generate -b debian:stretch -p", "_ = capsys.readouterr() assert 'COPY [\"path/to/file.txt\", \"/tmp/file.txt\"]' in out main(args.format('--env", "= ['generate', '-b', 'ubuntu:17.04', '-p', 'apt', '--no-check-urls'] main(args) out, _", "--ants option=value\" with pytest.raises(ValueError): main(args.split()) def test_generate_opts(capsys): args = \"generate", "with pytest.raises(ValueError): main(args.split()) def test_generate_opts(capsys): args = \"generate -b ubuntu:17.04", "out and \"RUN\" in out args.append('--no-print-df') main(args) out, _ =", "= capsys.readouterr() assert \"EXPOSE 1230 1231\" in out main(args.format('--workdir /home').split())", "out, _ = capsys.readouterr() assert \"WORKDIR /home\" in out main(args.format('--install", "RUNecho').split()) out, _ = capsys.readouterr() assert \"RUNecho\" in out def", "def test_generate(): args = (\"generate -b ubuntu:17.04 -p apt\" \"", "FOO=BAR BAZ\" \" --afni version=latest\" \" --ants version=2.2.0\" \" --freesurfer", "\" --no-check-urls\" \" --expose 1234 9000\" \" --volume /var /usr/bin\"", "'generation_timestamp': '2017-08-31 21:49:04', 'instructions': [['base', 'debian:stretch'], ['c3d', {'version': '1.0.0'}]], 'neurodocker_version':", "args = ['generate', '-b', 
'ubuntu:17.04', '-p', 'apt', '--no-check-urls'] main(args) out,", "conda_install=python=3.6.2\" \" --user=root\" \" --mrtrix3\" \" --neurodebian os_codename=zesty download_server=usa-nh\" \"", "capsys.readouterr() specs = {'check_urls': True, 'generation_timestamp': '2017-08-31 21:49:04', 'instructions': [['base',", "= capsys.readouterr() assert 'ENV KEY=\"VAL\" \\\\' in out assert '", "'COPY [\"path/to/file.txt\", \"/tmp/file.txt\"]' in out main(args.format('--env KEY=VAL KEY2=VAL').split()) out, _", "\" --miniconda env_name=neuro conda_install=python=3.6.2\" \" --user=root\" \" --mrtrix3\" \" --neurodebian", "= capsys.readouterr() assert 'COPY [\"path/to/file.txt\", \"/tmp/file.txt\"]' in out main(args.format('--env KEY=VAL", "1231').split()) out, _ = capsys.readouterr() assert \"EXPOSE 1230 1231\" in", "assert 'ENV KEY=\"VAL\" \\\\' in out assert ' KEY2=\"VAL\"' in", "[\"path/to/file.txt\", \"/tmp/file.txt\"]' in out main(args.format('--copy path/to/file.txt /tmp/file.txt').split()) out, _ =", "\" --afni version=latest\" \" --ants version=2.2.0\" \" --freesurfer version=6.0.0\" \"", "in out main(args.format('--env KEY=VAL KEY2=VAL').split()) out, _ = capsys.readouterr() assert", "out main(args.format('--expose 1230 1231').split()) out, _ = capsys.readouterr() assert \"EXPOSE", "BAZ=CAT\" \" --copy relpath/to/file.txt /tmp/file.txt\" \" --add relpath/to/file2.txt /tmp/file2.txt\" \"", "main(args.format('--user=neuro').split()) out, _ = capsys.readouterr() assert \"USER neuro\" in out", "'1.0.0'}]], 'neurodocker_version': '0.2.0-18-g9227b17', 'pkg_manager': 'apt'} str_specs = json.dumps(specs) filepath =", "main(args.split()) with pytest.raises(SystemExit): args = \"-b ubuntu\" main(args.split()) with pytest.raises(SystemExit):", "path/to/file.txt /tmp/file.txt').split()) out, _ = capsys.readouterr() assert 'COPY [\"path/to/file.txt\", \"/tmp/file.txt\"]'", "JSON (with timestamp). 
sl = slice(8, -19) assert true.split('\\n')[sl] ==", "assert true.split('\\n')[sl] == test.split('\\n')[sl] def test_generate_no_print(capsys): args = ['generate', '-b',", "capsys.readouterr() assert \"WORKDIR /home\" in out main(args.format('--install vi').split()) out, _", "capsys.readouterr() assert not out def test_generate_save(tmpdir): outfile = tmpdir.join(\"test.txt\") args", "\"/tmp/file.txt\"]' in out main(args.format('--env KEY=VAL KEY2=VAL').split()) out, _ = capsys.readouterr()", "neuro\" in out main(args.format('--add path/to/file.txt /tmp/file.txt').split()) out, _ = capsys.readouterr()", "header (with timestamp) and the layer that # saves to", "'ubuntu:17.04', '-p', 'apt', '--mrtrix3', 'use_binaries=false', '--no-print-df', '-o', outfile.strpath, '--no-check-urls'] main(args)", "args = \"generate -b ubuntu -p apt --ants option=value\" with", "main(gen_cmd.split()) test, _ = capsys.readouterr() # These indices chop off", "saves to JSON (with timestamp). sl = slice(8, -19) assert", "\"-p apt\" main(args.split()) with pytest.raises(SystemExit): main() args = \"generate -b", "out, _ = capsys.readouterr() assert \"RUNecho\" in out def test_generate_from_json(capsys,", "in out and \"RUN\" in out args.append('--no-print-df') main(args) out, _", "capsys.readouterr() assert 'ENV KEY=\"VAL\" \\\\' in out assert ' KEY2=\"VAL\"'", "\" --add relpath/to/file2.txt /tmp/file2.txt\" \" --cmd '--arg1' '--arg2'\" \" --workdir", "version=5.0.10\" \" --user=neuro\" \" --miniconda env_name=neuro conda_install=python=3.6.2\" \" --user=root\" \"", "pytest.raises(SystemExit): main() args = \"generate -b ubuntu -p apt --ants", "{'check_urls': True, 'generation_timestamp': '2017-08-31 21:49:04', 'instructions': [['base', 'debian:stretch'], ['c3d', {'version':", "args = \"generate -b ubuntu:17.04 -p apt --no-check-urls {}\" main(args.format('--user=neuro').split())", "git\" \" --user=neuro\" ) main(args.split()) with pytest.raises(SystemExit): args = \"-b", "apt\" 
main(args.split()) with pytest.raises(SystemExit): main() args = \"generate -b ubuntu", "= capsys.readouterr() assert \"RUNecho\" in out def test_generate_from_json(capsys, tmpdir): import", "outfile.strpath, '--no-check-urls'] main(args) assert outfile.read(), \"saved Dockerfile is empty\" assert", "option=value\" with pytest.raises(ValueError): main(args.split()) def test_generate_opts(capsys): args = \"generate -b", "\" --ants version=2.2.0\" \" --freesurfer version=6.0.0\" \" --fsl version=5.0.10\" \"", "= \"generate -b ubuntu -p apt --ants option=value\" with pytest.raises(ValueError):", "main(cmd.split()) true, _ = capsys.readouterr() specs = {'check_urls': True, 'generation_timestamp':", "/tmp/file.txt').split()) out, _ = capsys.readouterr() assert 'ADD [\"path/to/file.txt\", \"/tmp/file.txt\"]' in", "'--no-print-df', '-o', outfile.strpath, '--no-check-urls'] main(args) assert outfile.read(), \"saved Dockerfile is", "\"-b ubuntu\" main(args.split()) with pytest.raises(SystemExit): args = \"-p apt\" main(args.split())", "timestamp) and the layer that # saves to JSON (with", "'apt'} str_specs = json.dumps(specs) filepath = tmpdir.join(\"specs.json\") filepath.write(str_specs) gen_cmd =", "true.split('\\n')[sl] == test.split('\\n')[sl] def test_generate_no_print(capsys): args = ['generate', '-b', 'ubuntu:17.04',", "--file {}\".format(filepath) main(gen_cmd.split()) test, _ = capsys.readouterr() # These indices", "import json cmd = \"generate -b debian:stretch -p apt --c3d", "'-p', 'apt', '--mrtrix3', 'use_binaries=false', '--no-print-df', '-o', outfile.strpath, '--no-check-urls'] main(args) assert", "__future__ import absolute_import, unicode_literals import sys import pytest from neurodocker.neurodocker", "out, _ = capsys.readouterr() assert 'COPY [\"path/to/file.txt\", \"/tmp/file.txt\"]' in out", "\"\"\"Tests for neurodocker.main\"\"\" # Author: <NAME> <<EMAIL>> from __future__ import", "\" --user=root\" \" --mrtrix3\" \" --neurodebian os_codename=zesty 
download_server=usa-nh\" \" --spm", "main(args.split()) with pytest.raises(SystemExit): args = \"-p apt\" main(args.split()) with pytest.raises(SystemExit):", "from __future__ import absolute_import, unicode_literals import sys import pytest from", "= capsys.readouterr() assert \"WORKDIR /home\" in out main(args.format('--install vi').split()) out,", "debian:stretch -p apt --c3d version=1.0.0\" main(cmd.split()) true, _ = capsys.readouterr()", "tmpdir): import json cmd = \"generate -b debian:stretch -p apt", "main(args.format('--instruction RUNecho').split()) out, _ = capsys.readouterr() assert \"RUNecho\" in out", "\" --volume /var /usr/bin\" \" --label FOO=BAR BAZ=CAT\" \" --copy", "main(args.format('--add path/to/file.txt /tmp/file.txt').split()) out, _ = capsys.readouterr() assert 'ADD [\"path/to/file.txt\",", "= capsys.readouterr() # These indices chop off the header (with", "capsys.readouterr() assert 'ADD [\"path/to/file.txt\", \"/tmp/file.txt\"]' in out main(args.format('--copy path/to/file.txt /tmp/file.txt').split())", "ubuntu:17.04 -p apt\" \" --arg FOO=BAR BAZ\" \" --afni version=latest\"", "import absolute_import, unicode_literals import sys import pytest from neurodocker.neurodocker import", "'instructions': [['base', 'debian:stretch'], ['c3d', {'version': '1.0.0'}]], 'neurodocker_version': '0.2.0-18-g9227b17', 'pkg_manager': 'apt'}", "-b ubuntu:17.04 -p apt\" \" --arg FOO=BAR BAZ\" \" --afni", "'pkg_manager': 'apt'} str_specs = json.dumps(specs) filepath = tmpdir.join(\"specs.json\") filepath.write(str_specs) gen_cmd", ") main(args.split()) with pytest.raises(SystemExit): args = \"-b ubuntu\" main(args.split()) with", "args = \"-b ubuntu\" main(args.split()) with pytest.raises(SystemExit): args = \"-p", "= capsys.readouterr() assert \"USER neuro\" in out main(args.format('--add path/to/file.txt /tmp/file.txt').split())", "gen_cmd = \"generate --file {}\".format(filepath) main(gen_cmd.split()) test, _ = capsys.readouterr()", "'--arg1' '--arg2'\" \" 
--workdir /home\" \" --install git\" \" --user=neuro\"", "--workdir /home\" \" --install git\" \" --user=neuro\" ) main(args.split()) with", "assert \"FROM\" in out and \"RUN\" in out args.append('--no-print-df') main(args)", "['c3d', {'version': '1.0.0'}]], 'neurodocker_version': '0.2.0-18-g9227b17', 'pkg_manager': 'apt'} str_specs = json.dumps(specs)", "\" --freesurfer version=6.0.0\" \" --fsl version=5.0.10\" \" --user=neuro\" \" --miniconda", "\"vi\" in out main(args.format('--instruction RUNecho').split()) out, _ = capsys.readouterr() assert", "= capsys.readouterr() assert \"vi\" in out main(args.format('--instruction RUNecho').split()) out, _", "version=latest\" \" --ants version=2.2.0\" \" --freesurfer version=6.0.0\" \" --fsl version=5.0.10\"", "--cmd '--arg1' '--arg2'\" \" --workdir /home\" \" --install git\" \"", "main(args.format('--expose 1230 1231').split()) out, _ = capsys.readouterr() assert \"EXPOSE 1230", "assert outfile.read(), \"saved Dockerfile is empty\" assert \"git clone https://github.com/MRtrix3/mrtrix3.git\"", "args = (\"generate -b ubuntu:17.04 -p apt\" \" --arg FOO=BAR", "pytest from neurodocker.neurodocker import create_parser, parse_args, main def test_generate(): args", "= {'check_urls': True, 'generation_timestamp': '2017-08-31 21:49:04', 'instructions': [['base', 'debian:stretch'], ['c3d',", "(with timestamp). 
sl = slice(8, -19) assert true.split('\\n')[sl] == test.split('\\n')[sl]", "\"generate -b ubuntu -p apt --ants option=value\" with pytest.raises(ValueError): main(args.split())", "main() args = \"generate -b ubuntu -p apt --ants option=value\"", "pytest.raises(ValueError): main(args.split()) def test_generate_opts(capsys): args = \"generate -b ubuntu:17.04 -p", "= tmpdir.join(\"test.txt\") args = ['generate', '-b', 'ubuntu:17.04', '-p', 'apt', '--mrtrix3',", "\" --expose 1234 9000\" \" --volume /var /usr/bin\" \" --label", "assert ' KEY2=\"VAL\"' in out main(args.format('--expose 1230 1231').split()) out, _", "the header (with timestamp) and the layer that # saves", "= \"-p apt\" main(args.split()) with pytest.raises(SystemExit): main() args = \"generate", "_ = capsys.readouterr() assert \"EXPOSE 1230 1231\" in out main(args.format('--workdir", "= capsys.readouterr() assert 'ADD [\"path/to/file.txt\", \"/tmp/file.txt\"]' in out main(args.format('--copy path/to/file.txt", "in out main(args.format('--add path/to/file.txt /tmp/file.txt').split()) out, _ = capsys.readouterr() assert", "neurodocker.neurodocker import create_parser, parse_args, main def test_generate(): args = (\"generate", "= \"generate -b ubuntu:17.04 -p apt --no-check-urls {}\" main(args.format('--user=neuro').split()) out,", "[['base', 'debian:stretch'], ['c3d', {'version': '1.0.0'}]], 'neurodocker_version': '0.2.0-18-g9227b17', 'pkg_manager': 'apt'} str_specs", "path/to/file.txt /tmp/file.txt').split()) out, _ = capsys.readouterr() assert 'ADD [\"path/to/file.txt\", \"/tmp/file.txt\"]'", "that # saves to JSON (with timestamp). 
sl = slice(8,", "KEY2=VAL').split()) out, _ = capsys.readouterr() assert 'ENV KEY=\"VAL\" \\\\' in", "capsys.readouterr() assert \"FROM\" in out and \"RUN\" in out args.append('--no-print-df')", "assert \"USER neuro\" in out main(args.format('--add path/to/file.txt /tmp/file.txt').split()) out, _", "['generate', '-b', 'ubuntu:17.04', '-p', 'apt', '--mrtrix3', 'use_binaries=false', '--no-print-df', '-o', outfile.strpath,", "--copy relpath/to/file.txt /tmp/file.txt\" \" --add relpath/to/file2.txt /tmp/file2.txt\" \" --cmd '--arg1'", "# Author: <NAME> <<EMAIL>> from __future__ import absolute_import, unicode_literals import", "/tmp/file.txt').split()) out, _ = capsys.readouterr() assert 'COPY [\"path/to/file.txt\", \"/tmp/file.txt\"]' in", "= capsys.readouterr() assert \"FROM\" in out and \"RUN\" in out", "_ = capsys.readouterr() assert 'ADD [\"path/to/file.txt\", \"/tmp/file.txt\"]' in out main(args.format('--copy", "--neurodebian os_codename=zesty download_server=usa-nh\" \" --spm version=12 matlab_version=R2017a\" \" --no-check-urls\" \"", "\" --copy relpath/to/file.txt /tmp/file.txt\" \" --add relpath/to/file2.txt /tmp/file2.txt\" \" --cmd", "chop off the header (with timestamp) and the layer that", "def test_generate_opts(capsys): args = \"generate -b ubuntu:17.04 -p apt --no-check-urls", "out, _ = capsys.readouterr() assert \"EXPOSE 1230 1231\" in out", "'-o', outfile.strpath, '--no-check-urls'] main(args) assert outfile.read(), \"saved Dockerfile is empty\"", "_ = capsys.readouterr() assert \"WORKDIR /home\" in out main(args.format('--install vi').split())", "9000\" \" --volume /var /usr/bin\" \" --label FOO=BAR BAZ=CAT\" \"", "in out args.append('--no-print-df') main(args) out, _ = capsys.readouterr() assert not", "\" --user=neuro\" ) main(args.split()) with pytest.raises(SystemExit): args = \"-b ubuntu\"", "--spm version=12 matlab_version=R2017a\" \" --no-check-urls\" \" --expose 1234 9000\" \"", "assert \"WORKDIR /home\" in out main(args.format('--install 
vi').split()) out, _ =", "cmd = \"generate -b debian:stretch -p apt --c3d version=1.0.0\" main(cmd.split())", "'neurodocker_version': '0.2.0-18-g9227b17', 'pkg_manager': 'apt'} str_specs = json.dumps(specs) filepath = tmpdir.join(\"specs.json\")", "\" --arg FOO=BAR BAZ\" \" --afni version=latest\" \" --ants version=2.2.0\"", "\" --install git\" \" --user=neuro\" ) main(args.split()) with pytest.raises(SystemExit): args", "main(args.format('--install vi').split()) out, _ = capsys.readouterr() assert \"vi\" in out", "main(args) out, _ = capsys.readouterr() assert not out def test_generate_save(tmpdir):", "1230 1231\" in out main(args.format('--workdir /home').split()) out, _ = capsys.readouterr()", "ubuntu -p apt --ants option=value\" with pytest.raises(ValueError): main(args.split()) def test_generate_opts(capsys):", "\"generate -b debian:stretch -p apt --c3d version=1.0.0\" main(cmd.split()) true, _", "_ = capsys.readouterr() assert \"RUNecho\" in out def test_generate_from_json(capsys, tmpdir):", "\\\\' in out assert ' KEY2=\"VAL\"' in out main(args.format('--expose 1230", "{}\" main(args.format('--user=neuro').split()) out, _ = capsys.readouterr() assert \"USER neuro\" in", "outfile = tmpdir.join(\"test.txt\") args = ['generate', '-b', 'ubuntu:17.04', '-p', 'apt',", "KEY2=\"VAL\"' in out main(args.format('--expose 1230 1231').split()) out, _ = capsys.readouterr()", "# These indices chop off the header (with timestamp) and", "and the layer that # saves to JSON (with timestamp).", "= \"generate -b debian:stretch -p apt --c3d version=1.0.0\" main(cmd.split()) true,", "out assert ' KEY2=\"VAL\"' in out main(args.format('--expose 1230 1231').split()) out,", "'2017-08-31 21:49:04', 'instructions': [['base', 'debian:stretch'], ['c3d', {'version': '1.0.0'}]], 'neurodocker_version': '0.2.0-18-g9227b17',", "\"WORKDIR /home\" in out main(args.format('--install vi').split()) out, _ = capsys.readouterr()", "str_specs = json.dumps(specs) filepath = tmpdir.join(\"specs.json\") 
filepath.write(str_specs) gen_cmd = \"generate", "\" --spm version=12 matlab_version=R2017a\" \" --no-check-urls\" \" --expose 1234 9000\"", "capsys.readouterr() assert \"RUNecho\" in out def test_generate_from_json(capsys, tmpdir): import json", "main(args.split()) with pytest.raises(SystemExit): main() args = \"generate -b ubuntu -p", "matlab_version=R2017a\" \" --no-check-urls\" \" --expose 1234 9000\" \" --volume /var", "os_codename=zesty download_server=usa-nh\" \" --spm version=12 matlab_version=R2017a\" \" --no-check-urls\" \" --expose", "FOO=BAR BAZ=CAT\" \" --copy relpath/to/file.txt /tmp/file.txt\" \" --add relpath/to/file2.txt /tmp/file2.txt\"", "\" --user=neuro\" \" --miniconda env_name=neuro conda_install=python=3.6.2\" \" --user=root\" \" --mrtrix3\"", "out, _ = capsys.readouterr() assert 'ADD [\"path/to/file.txt\", \"/tmp/file.txt\"]' in out", "<<EMAIL>> from __future__ import absolute_import, unicode_literals import sys import pytest", "out, _ = capsys.readouterr() assert not out def test_generate_save(tmpdir): outfile", "args.append('--no-print-df') main(args) out, _ = capsys.readouterr() assert not out def", "'--no-check-urls'] main(args) out, _ = capsys.readouterr() assert \"FROM\" in out", "download_server=usa-nh\" \" --spm version=12 matlab_version=R2017a\" \" --no-check-urls\" \" --expose 1234", "ubuntu:17.04 -p apt --no-check-urls {}\" main(args.format('--user=neuro').split()) out, _ = capsys.readouterr()", "not out def test_generate_save(tmpdir): outfile = tmpdir.join(\"test.txt\") args = ['generate',", "to JSON (with timestamp). 
sl = slice(8, -19) assert true.split('\\n')[sl]", "test_generate_no_print(capsys): args = ['generate', '-b', 'ubuntu:17.04', '-p', 'apt', '--no-check-urls'] main(args)", "out main(args.format('--add path/to/file.txt /tmp/file.txt').split()) out, _ = capsys.readouterr() assert 'ADD", "KEY=VAL KEY2=VAL').split()) out, _ = capsys.readouterr() assert 'ENV KEY=\"VAL\" \\\\'", "import sys import pytest from neurodocker.neurodocker import create_parser, parse_args, main", "version=6.0.0\" \" --fsl version=5.0.10\" \" --user=neuro\" \" --miniconda env_name=neuro conda_install=python=3.6.2\"", "1230 1231').split()) out, _ = capsys.readouterr() assert \"EXPOSE 1230 1231\"", "--user=root\" \" --mrtrix3\" \" --neurodebian os_codename=zesty download_server=usa-nh\" \" --spm version=12", "/tmp/file.txt\" \" --add relpath/to/file2.txt /tmp/file2.txt\" \" --cmd '--arg1' '--arg2'\" \"", "\"RUN\" in out args.append('--no-print-df') main(args) out, _ = capsys.readouterr() assert", "capsys.readouterr() assert 'COPY [\"path/to/file.txt\", \"/tmp/file.txt\"]' in out main(args.format('--env KEY=VAL KEY2=VAL').split())", "\" --mrtrix3\" \" --neurodebian os_codename=zesty download_server=usa-nh\" \" --spm version=12 matlab_version=R2017a\"", "_ = capsys.readouterr() specs = {'check_urls': True, 'generation_timestamp': '2017-08-31 21:49:04',", "off the header (with timestamp) and the layer that #", "= capsys.readouterr() specs = {'check_urls': True, 'generation_timestamp': '2017-08-31 21:49:04', 'instructions':", "tmpdir.join(\"specs.json\") filepath.write(str_specs) gen_cmd = \"generate --file {}\".format(filepath) main(gen_cmd.split()) test, _", "assert \"RUNecho\" in out def test_generate_from_json(capsys, tmpdir): import json cmd", "/home\" in out main(args.format('--install vi').split()) out, _ = capsys.readouterr() assert", "Author: <NAME> <<EMAIL>> from __future__ import absolute_import, unicode_literals import sys", "'--no-check-urls'] main(args) assert outfile.read(), \"saved 
Dockerfile is empty\" assert \"git", "_ = capsys.readouterr() assert not out def test_generate_save(tmpdir): outfile =", "out, _ = capsys.readouterr() assert 'ENV KEY=\"VAL\" \\\\' in out", "with pytest.raises(SystemExit): args = \"-b ubuntu\" main(args.split()) with pytest.raises(SystemExit): args", "== test.split('\\n')[sl] def test_generate_no_print(capsys): args = ['generate', '-b', 'ubuntu:17.04', '-p',", "'apt', '--mrtrix3', 'use_binaries=false', '--no-print-df', '-o', outfile.strpath, '--no-check-urls'] main(args) assert outfile.read(),", "--user=neuro\" ) main(args.split()) with pytest.raises(SystemExit): args = \"-b ubuntu\" main(args.split())", "specs = {'check_urls': True, 'generation_timestamp': '2017-08-31 21:49:04', 'instructions': [['base', 'debian:stretch'],", "--no-check-urls {}\" main(args.format('--user=neuro').split()) out, _ = capsys.readouterr() assert \"USER neuro\"", "(\"generate -b ubuntu:17.04 -p apt\" \" --arg FOO=BAR BAZ\" \"", "-p apt --c3d version=1.0.0\" main(cmd.split()) true, _ = capsys.readouterr() specs", "main def test_generate(): args = (\"generate -b ubuntu:17.04 -p apt\"", "-b ubuntu:17.04 -p apt --no-check-urls {}\" main(args.format('--user=neuro').split()) out, _ =", "'--mrtrix3', 'use_binaries=false', '--no-print-df', '-o', outfile.strpath, '--no-check-urls'] main(args) assert outfile.read(), \"saved", "/tmp/file2.txt\" \" --cmd '--arg1' '--arg2'\" \" --workdir /home\" \" --install", "parse_args, main def test_generate(): args = (\"generate -b ubuntu:17.04 -p", "filepath = tmpdir.join(\"specs.json\") filepath.write(str_specs) gen_cmd = \"generate --file {}\".format(filepath) main(gen_cmd.split())", "-b debian:stretch -p apt --c3d version=1.0.0\" main(cmd.split()) true, _ =", "def test_generate_save(tmpdir): outfile = tmpdir.join(\"test.txt\") args = ['generate', '-b', 'ubuntu:17.04',", "in out main(args.format('--install vi').split()) out, _ = capsys.readouterr() assert \"vi\"", "\" --cmd '--arg1' '--arg2'\" \" --workdir 
/home\" \" --install git\"", "absolute_import, unicode_literals import sys import pytest from neurodocker.neurodocker import create_parser,", "--label FOO=BAR BAZ=CAT\" \" --copy relpath/to/file.txt /tmp/file.txt\" \" --add relpath/to/file2.txt", "'-b', 'ubuntu:17.04', '-p', 'apt', '--mrtrix3', 'use_binaries=false', '--no-print-df', '-o', outfile.strpath, '--no-check-urls']", "tmpdir.join(\"test.txt\") args = ['generate', '-b', 'ubuntu:17.04', '-p', 'apt', '--mrtrix3', 'use_binaries=false',", "relpath/to/file.txt /tmp/file.txt\" \" --add relpath/to/file2.txt /tmp/file2.txt\" \" --cmd '--arg1' '--arg2'\"", "out main(args.format('--workdir /home').split()) out, _ = capsys.readouterr() assert \"WORKDIR /home\"", "\"saved Dockerfile is empty\" assert \"git clone https://github.com/MRtrix3/mrtrix3.git\" in outfile.read()", "version=12 matlab_version=R2017a\" \" --no-check-urls\" \" --expose 1234 9000\" \" --volume", "_ = capsys.readouterr() assert \"vi\" in out main(args.format('--instruction RUNecho').split()) out,", "-p apt --ants option=value\" with pytest.raises(ValueError): main(args.split()) def test_generate_opts(capsys): args", "true, _ = capsys.readouterr() specs = {'check_urls': True, 'generation_timestamp': '2017-08-31", "unicode_literals import sys import pytest from neurodocker.neurodocker import create_parser, parse_args,", "<NAME> <<EMAIL>> from __future__ import absolute_import, unicode_literals import sys import", "_ = capsys.readouterr() assert 'ENV KEY=\"VAL\" \\\\' in out assert", "slice(8, -19) assert true.split('\\n')[sl] == test.split('\\n')[sl] def test_generate_no_print(capsys): args =", "/home\" \" --install git\" \" --user=neuro\" ) main(args.split()) with pytest.raises(SystemExit):", "\"FROM\" in out and \"RUN\" in out args.append('--no-print-df') main(args) out,", "apt --ants option=value\" with pytest.raises(ValueError): main(args.split()) def test_generate_opts(capsys): args =", "pytest.raises(SystemExit): args = \"-b ubuntu\" 
main(args.split()) with pytest.raises(SystemExit): args =", "out, _ = capsys.readouterr() assert \"FROM\" in out and \"RUN\"", "True, 'generation_timestamp': '2017-08-31 21:49:04', 'instructions': [['base', 'debian:stretch'], ['c3d', {'version': '1.0.0'}]],", "out args.append('--no-print-df') main(args) out, _ = capsys.readouterr() assert not out", "--mrtrix3\" \" --neurodebian os_codename=zesty download_server=usa-nh\" \" --spm version=12 matlab_version=R2017a\" \"", "\"USER neuro\" in out main(args.format('--add path/to/file.txt /tmp/file.txt').split()) out, _ =", "neurodocker.main\"\"\" # Author: <NAME> <<EMAIL>> from __future__ import absolute_import, unicode_literals", "env_name=neuro conda_install=python=3.6.2\" \" --user=root\" \" --mrtrix3\" \" --neurodebian os_codename=zesty download_server=usa-nh\"", "vi').split()) out, _ = capsys.readouterr() assert \"vi\" in out main(args.format('--instruction", "_ = capsys.readouterr() assert \"USER neuro\" in out main(args.format('--add path/to/file.txt", "--volume /var /usr/bin\" \" --label FOO=BAR BAZ=CAT\" \" --copy relpath/to/file.txt", "capsys.readouterr() assert \"USER neuro\" in out main(args.format('--add path/to/file.txt /tmp/file.txt').split()) out,", "main(args.format('--env KEY=VAL KEY2=VAL').split()) out, _ = capsys.readouterr() assert 'ENV KEY=\"VAL\"", "21:49:04', 'instructions': [['base', 'debian:stretch'], ['c3d', {'version': '1.0.0'}]], 'neurodocker_version': '0.2.0-18-g9227b17', 'pkg_manager':", "out main(args.format('--instruction RUNecho').split()) out, _ = capsys.readouterr() assert \"RUNecho\" in", "--freesurfer version=6.0.0\" \" --fsl version=5.0.10\" \" --user=neuro\" \" --miniconda env_name=neuro", "outfile.read(), \"saved Dockerfile is empty\" assert \"git clone https://github.com/MRtrix3/mrtrix3.git\" in", "1234 9000\" \" --volume /var /usr/bin\" \" --label FOO=BAR BAZ=CAT\"", "from neurodocker.neurodocker import create_parser, parse_args, main def test_generate(): args =", 
"'ubuntu:17.04', '-p', 'apt', '--no-check-urls'] main(args) out, _ = capsys.readouterr() assert", "\" --fsl version=5.0.10\" \" --user=neuro\" \" --miniconda env_name=neuro conda_install=python=3.6.2\" \"", "--ants version=2.2.0\" \" --freesurfer version=6.0.0\" \" --fsl version=5.0.10\" \" --user=neuro\"", "-p apt --no-check-urls {}\" main(args.format('--user=neuro').split()) out, _ = capsys.readouterr() assert", "sl = slice(8, -19) assert true.split('\\n')[sl] == test.split('\\n')[sl] def test_generate_no_print(capsys):", "apt\" \" --arg FOO=BAR BAZ\" \" --afni version=latest\" \" --ants", "These indices chop off the header (with timestamp) and the", "test, _ = capsys.readouterr() # These indices chop off the", "\" --label FOO=BAR BAZ=CAT\" \" --copy relpath/to/file.txt /tmp/file.txt\" \" --add", "--no-check-urls\" \" --expose 1234 9000\" \" --volume /var /usr/bin\" \"", "'use_binaries=false', '--no-print-df', '-o', outfile.strpath, '--no-check-urls'] main(args) assert outfile.read(), \"saved Dockerfile", "'ADD [\"path/to/file.txt\", \"/tmp/file.txt\"]' in out main(args.format('--copy path/to/file.txt /tmp/file.txt').split()) out, _", "= ['generate', '-b', 'ubuntu:17.04', '-p', 'apt', '--mrtrix3', 'use_binaries=false', '--no-print-df', '-o',", "<filename>neurodocker/tests/test_neurodocker.py \"\"\"Tests for neurodocker.main\"\"\" # Author: <NAME> <<EMAIL>> from __future__", "KEY=\"VAL\" \\\\' in out assert ' KEY2=\"VAL\"' in out main(args.format('--expose", "\" --neurodebian os_codename=zesty download_server=usa-nh\" \" --spm version=12 matlab_version=R2017a\" \" --no-check-urls\"", "relpath/to/file2.txt /tmp/file2.txt\" \" --cmd '--arg1' '--arg2'\" \" --workdir /home\" \"", "and \"RUN\" in out args.append('--no-print-df') main(args) out, _ = capsys.readouterr()", "'ENV KEY=\"VAL\" \\\\' in out assert ' KEY2=\"VAL\"' in out", "with pytest.raises(SystemExit): main() args = \"generate -b ubuntu -p apt", "assert 'COPY [\"path/to/file.txt\", \"/tmp/file.txt\"]' in 
out main(args.format('--env KEY=VAL KEY2=VAL').split()) out,", "-p apt\" \" --arg FOO=BAR BAZ\" \" --afni version=latest\" \"", "import create_parser, parse_args, main def test_generate(): args = (\"generate -b", "= \"generate --file {}\".format(filepath) main(gen_cmd.split()) test, _ = capsys.readouterr() #", "_ = capsys.readouterr() assert \"FROM\" in out and \"RUN\" in", "test.split('\\n')[sl] def test_generate_no_print(capsys): args = ['generate', '-b', 'ubuntu:17.04', '-p', 'apt',", "version=2.2.0\" \" --freesurfer version=6.0.0\" \" --fsl version=5.0.10\" \" --user=neuro\" \"", "out, _ = capsys.readouterr() assert \"USER neuro\" in out main(args.format('--add", "'-b', 'ubuntu:17.04', '-p', 'apt', '--no-check-urls'] main(args) out, _ = capsys.readouterr()", "apt --c3d version=1.0.0\" main(cmd.split()) true, _ = capsys.readouterr() specs =", "pytest.raises(SystemExit): args = \"-p apt\" main(args.split()) with pytest.raises(SystemExit): main() args", "= capsys.readouterr() assert not out def test_generate_save(tmpdir): outfile = tmpdir.join(\"test.txt\")", "test_generate_save(tmpdir): outfile = tmpdir.join(\"test.txt\") args = ['generate', '-b', 'ubuntu:17.04', '-p',", "layer that # saves to JSON (with timestamp). 
sl =", "in out main(args.format('--expose 1230 1231').split()) out, _ = capsys.readouterr() assert", "(with timestamp) and the layer that # saves to JSON", "test_generate_opts(capsys): args = \"generate -b ubuntu:17.04 -p apt --no-check-urls {}\"", "in out main(args.format('--instruction RUNecho').split()) out, _ = capsys.readouterr() assert \"RUNecho\"", "/var /usr/bin\" \" --label FOO=BAR BAZ=CAT\" \" --copy relpath/to/file.txt /tmp/file.txt\"", "--arg FOO=BAR BAZ\" \" --afni version=latest\" \" --ants version=2.2.0\" \"", "1231\" in out main(args.format('--workdir /home').split()) out, _ = capsys.readouterr() assert", "assert \"EXPOSE 1230 1231\" in out main(args.format('--workdir /home').split()) out, _", "in out assert ' KEY2=\"VAL\"' in out main(args.format('--expose 1230 1231').split())", "-b ubuntu -p apt --ants option=value\" with pytest.raises(ValueError): main(args.split()) def", "/usr/bin\" \" --label FOO=BAR BAZ=CAT\" \" --copy relpath/to/file.txt /tmp/file.txt\" \"", "args = \"-p apt\" main(args.split()) with pytest.raises(SystemExit): main() args =", "\"EXPOSE 1230 1231\" in out main(args.format('--workdir /home').split()) out, _ =", "\"RUNecho\" in out def test_generate_from_json(capsys, tmpdir): import json cmd =", "assert \"vi\" in out main(args.format('--instruction RUNecho').split()) out, _ = capsys.readouterr()", "= \"-b ubuntu\" main(args.split()) with pytest.raises(SystemExit): args = \"-p apt\"", "sys import pytest from neurodocker.neurodocker import create_parser, parse_args, main def", "out main(args.format('--install vi').split()) out, _ = capsys.readouterr() assert \"vi\" in", "{}\".format(filepath) main(gen_cmd.split()) test, _ = capsys.readouterr() # These indices chop", "\" --workdir /home\" \" --install git\" \" --user=neuro\" ) main(args.split())", "-19) assert true.split('\\n')[sl] == test.split('\\n')[sl] def test_generate_no_print(capsys): args = ['generate',", "--c3d version=1.0.0\" main(cmd.split()) true, _ = 
capsys.readouterr() specs = {'check_urls':", "main(args) assert outfile.read(), \"saved Dockerfile is empty\" assert \"git clone", "--expose 1234 9000\" \" --volume /var /usr/bin\" \" --label FOO=BAR", "filepath.write(str_specs) gen_cmd = \"generate --file {}\".format(filepath) main(gen_cmd.split()) test, _ =", "out def test_generate_save(tmpdir): outfile = tmpdir.join(\"test.txt\") args = ['generate', '-b',", "assert not out def test_generate_save(tmpdir): outfile = tmpdir.join(\"test.txt\") args =", "out main(args.format('--copy path/to/file.txt /tmp/file.txt').split()) out, _ = capsys.readouterr() assert 'COPY", "_ = capsys.readouterr() # These indices chop off the header", "'0.2.0-18-g9227b17', 'pkg_manager': 'apt'} str_specs = json.dumps(specs) filepath = tmpdir.join(\"specs.json\") filepath.write(str_specs)", "# saves to JSON (with timestamp). sl = slice(8, -19)", "args = ['generate', '-b', 'ubuntu:17.04', '-p', 'apt', '--mrtrix3', 'use_binaries=false', '--no-print-df',", "BAZ\" \" --afni version=latest\" \" --ants version=2.2.0\" \" --freesurfer version=6.0.0\"", "for neurodocker.main\"\"\" # Author: <NAME> <<EMAIL>> from __future__ import absolute_import,", "out main(args.format('--env KEY=VAL KEY2=VAL').split()) out, _ = capsys.readouterr() assert 'ENV", "assert 'ADD [\"path/to/file.txt\", \"/tmp/file.txt\"]' in out main(args.format('--copy path/to/file.txt /tmp/file.txt').split()) out,", "capsys.readouterr() assert \"vi\" in out main(args.format('--instruction RUNecho').split()) out, _ =", "version=1.0.0\" main(cmd.split()) true, _ = capsys.readouterr() specs = {'check_urls': True,", "--afni version=latest\" \" --ants version=2.2.0\" \" --freesurfer version=6.0.0\" \" --fsl", "capsys.readouterr() assert \"EXPOSE 1230 1231\" in out main(args.format('--workdir /home').split()) out,", "{'version': '1.0.0'}]], 'neurodocker_version': '0.2.0-18-g9227b17', 'pkg_manager': 'apt'} str_specs = json.dumps(specs) filepath", "json.dumps(specs) filepath = 
tmpdir.join(\"specs.json\") filepath.write(str_specs) gen_cmd = \"generate --file {}\".format(filepath)", "--user=neuro\" \" --miniconda env_name=neuro conda_install=python=3.6.2\" \" --user=root\" \" --mrtrix3\" \"", "'--arg2'\" \" --workdir /home\" \" --install git\" \" --user=neuro\" )", "'debian:stretch'], ['c3d', {'version': '1.0.0'}]], 'neurodocker_version': '0.2.0-18-g9227b17', 'pkg_manager': 'apt'} str_specs =", "--install git\" \" --user=neuro\" ) main(args.split()) with pytest.raises(SystemExit): args =", "= tmpdir.join(\"specs.json\") filepath.write(str_specs) gen_cmd = \"generate --file {}\".format(filepath) main(gen_cmd.split()) test,", "timestamp). sl = slice(8, -19) assert true.split('\\n')[sl] == test.split('\\n')[sl] def", "indices chop off the header (with timestamp) and the layer", "apt --no-check-urls {}\" main(args.format('--user=neuro').split()) out, _ = capsys.readouterr() assert \"USER", "out, _ = capsys.readouterr() assert \"vi\" in out main(args.format('--instruction RUNecho').split())", "\"generate --file {}\".format(filepath) main(gen_cmd.split()) test, _ = capsys.readouterr() # These", "/home').split()) out, _ = capsys.readouterr() assert \"WORKDIR /home\" in out", "json cmd = \"generate -b debian:stretch -p apt --c3d version=1.0.0\"", "main(args.format('--workdir /home').split()) out, _ = capsys.readouterr() assert \"WORKDIR /home\" in", "in out main(args.format('--copy path/to/file.txt /tmp/file.txt').split()) out, _ = capsys.readouterr() assert", "main(args) out, _ = capsys.readouterr() assert \"FROM\" in out and", "--miniconda env_name=neuro conda_install=python=3.6.2\" \" --user=root\" \" --mrtrix3\" \" --neurodebian os_codename=zesty", "create_parser, parse_args, main def test_generate(): args = (\"generate -b ubuntu:17.04", "with pytest.raises(SystemExit): args = \"-p apt\" main(args.split()) with pytest.raises(SystemExit): main()", "main(args.format('--copy path/to/file.txt /tmp/file.txt').split()) out, _ = capsys.readouterr() 
assert 'COPY [\"path/to/file.txt\"," ]
[ "CLEM CLEM_X10Y137 30 13 SLICE_X13Y137/AFF REG_INIT 1 FDRE CLEM CLEM_X10Y137", "# # Copyright (C) 2020 The Project U-Ray Authors. #", "governed by a ISC-style # license that can be found", "Use of this source code is governed by a ISC-style", "F G H which = ff_name[0] # LUT6 vs LUT5", "\"%s.ZINI\" % ff_name, 1 ^ init) ''' On name: The", "a control input that sets the FF value to one.", "$bel_type $used $usedstr\" CLEM CLEM_X10Y137 30 13 SLICE_X13Y137/AFF REG_INIT 1", "grid_x = line[2] grid_y = line[3] # Other code uses", "= line[4] site, ff_name = site_ff_name.split('/') ff_type = line[5] used", "that sets the FF value to one. Z => inversion", "# -*- coding: utf-8 -*- # # Copyright (C) 2020", "''' FDCE Primitive: D Flip-Flop with Clock Enable and Asynchronous", "you listed have a control input to set the FF", "FDCE Primitive: D Flip-Flop with Clock Enable and Asynchronous Clear", "(clear/reset), the other three primitives have a control input that", "Other code uses BEL name # SLICE_X12Y137/D2FF site_ff_name = line[4]", "line[2] grid_y = line[3] # Other code uses BEL name", "control input that sets the FF value to one. Z", "loc, bel, init = l.split(\",\") i = int(i) init =", "used: segmk.add_site_tag(site, \"%s.ZINI\" % ff_name, 1 ^ init) ''' On", "E F G H which = ff_name[0] # LUT6 vs", "Copyright (C) 2020 The Project U-Ray Authors. 
# # Use", "init) return ret top = loadtop() def vs2i(s): return {\"1'b0\":", "have a control input that sets the FF value to", "vs LUT5 FF is2 = '2' in ff_name if used:", "bel, init = l.split(\",\") i = int(i) init = int(init)", "f: ''' puts $fp \"$type $tile $grid_x $grid_y $ff $bel_type", "bits_per_word=16) def loadtop(): ''' i,prim,loc,bel 0,FDPE,SLICE_X12Y100,C5FF 1,FDPE,SLICE_X15Y100,A5FF 2,FDPE_1,SLICE_X16Y100,B5FF 3,LDCE_1,SLICE_X17Y100,BFF '''", "zero (clear/reset), the other three primitives have a control input", "line[8] cinv = int(line[9]) init = vs2i(line[10]) # A B", "used: cel_name = line[7] cel_prim = line[8] cinv = int(line[9])", "= int(line[9]) init = vs2i(line[10]) # A B C D", "LUT6 vs LUT5 FF is2 = '2' in ff_name if", "Latch with Asynchronous Preset and Gate Enable ''' from prims", "# # SPDX-License-Identifier: ISC ''' FDCE Primitive: D Flip-Flop with", "i, prim, loc, bel, init = l.split(\",\") i = int(i)", "ff_name if used: segmk.add_site_tag(site, \"%s.ZINI\" % ff_name, 1 ^ init)", "Preset FDRE Primitive: D Flip-Flop with Clock Enable and Synchronous", "and Gate Enable LDPE Primitive: Transparent Data Latch with Asynchronous", "Project U-Ray Authors. 
# # Use of this source code", "30 13 SLICE_X13Y137/AFF REG_INIT 1 FDRE CLEM CLEM_X10Y137 30 13", "1}[s] print(\"Loading tags from design.txt\") with open(\"design.txt\", \"r\") as f:", "REG_INIT 1 FDRE CLEM CLEM_X10Y137 30 13 SLICE_X12Y137/D2FF FF_INIT 0", "Gate Enable LDPE Primitive: Transparent Data Latch with Asynchronous Preset", "C D E F G H which = ff_name[0] #", "l in f: i, prim, loc, bel, init = l.split(\",\")", "f: i, prim, loc, bel, init = l.split(\",\") i =", "with Clock Enable and Synchronous Set LDCE Primitive: Transparent Data", "Flip-Flop with Clock Enable and Asynchronous Clear FDPE Primitive: D", "# license that can be found in the LICENSE file", "-*- coding: utf-8 -*- # # Copyright (C) 2020 The", "line in f: ''' puts $fp \"$type $tile $grid_x $grid_y", "13 SLICE_X13Y137/AFF REG_INIT 1 FDRE CLEM CLEM_X10Y137 30 13 SLICE_X12Y137/D2FF", "= line[1] grid_x = line[2] grid_y = line[3] # Other", "cel_prim = line[8] cinv = int(line[9]) init = vs2i(line[10]) #", "segmk.add_site_tag(site, \"%s.ZRST\" % ff_name, cel_prim in ('FDRE', 'FDCE', 'LDCE')) segmk.compile()", "puts $fp \"$type $tile $grid_x $grid_y $ff $bel_type $used $usedstr\"", "$grid_y $ff $bel_type $used $usedstr\" CLEM CLEM_X10Y137 30 13 SLICE_X13Y137/AFF", "0,FDPE,SLICE_X12Y100,C5FF 1,FDPE,SLICE_X15Y100,A5FF 2,FDPE_1,SLICE_X16Y100,B5FF 3,LDCE_1,SLICE_X17Y100,BFF ''' f = open('top.txt', 'r') f.readline()", "https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC ''' FDCE Primitive: D Flip-Flop", "D Flip-Flop with Clock Enable and Asynchronous Clear FDPE Primitive:", "input that sets the FF value to one. 
Z =>", "print(\"Loading tags from design.txt\") with open(\"design.txt\", \"r\") as f: for", "Flip-Flop with Clock Enable and Synchronous Set LDCE Primitive: Transparent", "= ff_name[0] # LUT6 vs LUT5 FF is2 = '2'", "input to set the FF value to zero (clear/reset), the", "init = l.split(\",\") i = int(i) init = int(init) ret[loc]", "CLEM_X10Y137 30 13 SLICE_X12Y137/D2FF FF_INIT 0 ''' line = line.split()", "at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC ''' FDCE Primitive:", "site_ff_name = line[4] site, ff_name = site_ff_name.split('/') ff_type = line[5]", "\"$type $tile $grid_x $grid_y $ff $bel_type $used $usedstr\" CLEM CLEM_X10Y137", "with Asynchronous Clear and Gate Enable LDPE Primitive: Transparent Data", "line.split() tile_type = line[0] tile_name = line[1] grid_x = line[2]", "l.split(\",\") i = int(i) init = int(init) ret[loc] = (i,", "return {\"1'b0\": 0, \"1'b1\": 1}[s] print(\"Loading tags from design.txt\") with", "top = loadtop() def vs2i(s): return {\"1'b0\": 0, \"1'b1\": 1}[s]", "D E F G H which = ff_name[0] # LUT6", "LDCE Primitive: Transparent Data Latch with Asynchronous Clear and Gate", "Enable and Synchronous Reset FDSE Primitive: D Flip-Flop with Clock", "Synchronous Set LDCE Primitive: Transparent Data Latch with Asynchronous Clear", "to set the FF value to zero (clear/reset), the other", "and Asynchronous Preset FDRE Primitive: D Flip-Flop with Clock Enable", "import isff, isl from utils.segmaker import Segmaker segmk = Segmaker(\"design.bits\",", "= loadtop() def vs2i(s): return {\"1'b0\": 0, \"1'b1\": 1}[s] print(\"Loading", "2,FDPE_1,SLICE_X16Y100,B5FF 3,LDCE_1,SLICE_X17Y100,BFF ''' f = open('top.txt', 'r') f.readline() ret =", "# Copyright (C) 2020 The Project U-Ray Authors. 
# #", "# Use of this source code is governed by a", "prim, loc, bel, init = l.split(\",\") i = int(i) init", "other three primitives have a control input that sets the", "a ISC-style # license that can be found in the", "= line[0] tile_name = line[1] grid_x = line[2] grid_y =", "FDPE Primitive: D Flip-Flop with Clock Enable and Asynchronous Preset", "by a ISC-style # license that can be found in", "open('top.txt', 'r') f.readline() ret = {} for l in f:", "= line[5] used = int(line[6]) cel_prim = None cel_name =", "-*- # # Copyright (C) 2020 The Project U-Ray Authors.", "ret top = loadtop() def vs2i(s): return {\"1'b0\": 0, \"1'b1\":", "Gate Enable ''' from prims import isff, isl from utils.segmaker", "sets the FF value to one. Z => inversion '''", "B C D E F G H which = ff_name[0]", "code uses BEL name # SLICE_X12Y137/D2FF site_ff_name = line[4] site,", "can be found in the LICENSE file or at #", "vs2i(s): return {\"1'b0\": 0, \"1'b1\": 1}[s] print(\"Loading tags from design.txt\")", "D Flip-Flop with Clock Enable and Synchronous Set LDCE Primitive:", "$grid_x $grid_y $ff $bel_type $used $usedstr\" CLEM CLEM_X10Y137 30 13", "LDPE Primitive: Transparent Data Latch with Asynchronous Preset and Gate", "= None if used: cel_name = line[7] cel_prim = line[8]", "Clear and Gate Enable LDPE Primitive: Transparent Data Latch with", "and Synchronous Reset FDSE Primitive: D Flip-Flop with Clock Enable", "file or at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC '''", "# LUT6 vs LUT5 FF is2 = '2' in ff_name", "is governed by a ISC-style # license that can be", "source code is governed by a ISC-style # license that", "i,prim,loc,bel 0,FDPE,SLICE_X12Y100,C5FF 1,FDPE,SLICE_X15Y100,A5FF 2,FDPE_1,SLICE_X16Y100,B5FF 3,LDCE_1,SLICE_X17Y100,BFF ''' f = open('top.txt', 'r')", "code is governed by a ISC-style # license that can", "Segmaker(\"design.bits\", bits_per_word=16) def loadtop(): ''' i,prim,loc,bel 0,FDPE,SLICE_X12Y100,C5FF 1,FDPE,SLICE_X15Y100,A5FF 
2,FDPE_1,SLICE_X16Y100,B5FF 3,LDCE_1,SLICE_X17Y100,BFF", "line[5] used = int(line[6]) cel_prim = None cel_name = None", "''' On name: The primitives you listed have a control", "1 ^ init) ''' On name: The primitives you listed", "'2' in ff_name if used: segmk.add_site_tag(site, \"%s.ZINI\" % ff_name, 1", "isff, isl from utils.segmaker import Segmaker segmk = Segmaker(\"design.bits\", bits_per_word=16)", "init) ''' On name: The primitives you listed have a", "this source code is governed by a ISC-style # license", "name # SLICE_X12Y137/D2FF site_ff_name = line[4] site, ff_name = site_ff_name.split('/')", "return ret top = loadtop() def vs2i(s): return {\"1'b0\": 0,", "set the FF value to zero (clear/reset), the other three", "None if used: cel_name = line[7] cel_prim = line[8] cinv", "$ff $bel_type $used $usedstr\" CLEM CLEM_X10Y137 30 13 SLICE_X13Y137/AFF REG_INIT", "primitives you listed have a control input to set the", "site, ff_name = site_ff_name.split('/') ff_type = line[5] used = int(line[6])", "\"%s.ZRST\" % ff_name, cel_prim in ('FDRE', 'FDCE', 'LDCE')) segmk.compile() segmk.write()", "Clock Enable and Asynchronous Preset FDRE Primitive: D Flip-Flop with", "# SLICE_X12Y137/D2FF site_ff_name = line[4] site, ff_name = site_ff_name.split('/') ff_type", "and Gate Enable ''' from prims import isff, isl from", "{} for l in f: i, prim, loc, bel, init", "Transparent Data Latch with Asynchronous Clear and Gate Enable LDPE", "loc, bel, init) return ret top = loadtop() def vs2i(s):", "On name: The primitives you listed have a control input", "= int(init) ret[loc] = (i, prim, loc, bel, init) return", "= site_ff_name.split('/') ff_type = line[5] used = int(line[6]) cel_prim =", "FF_INIT 0 ''' line = line.split() tile_type = line[0] tile_name", "grid_y = line[3] # Other code uses BEL name #", "with Clock Enable and Asynchronous Clear FDPE Primitive: D Flip-Flop", "site_ff_name.split('/') ff_type = line[5] used = int(line[6]) cel_prim = None", "import Segmaker segmk = 
Segmaker(\"design.bits\", bits_per_word=16) def loadtop(): ''' i,prim,loc,bel", "# https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC ''' FDCE Primitive: D", "int(init) ret[loc] = (i, prim, loc, bel, init) return ret", "cel_prim = None cel_name = None if used: cel_name =", "SLICE_X12Y137/D2FF FF_INIT 0 ''' line = line.split() tile_type = line[0]", "Enable LDPE Primitive: Transparent Data Latch with Asynchronous Preset and", "% ff_name, 1 ^ init) ''' On name: The primitives", "tags from design.txt\") with open(\"design.txt\", \"r\") as f: for line", "line[1] grid_x = line[2] grid_y = line[3] # Other code", "license that can be found in the LICENSE file or", "FF value to one. Z => inversion ''' segmk.add_site_tag(site, \"%s.ZRST\"", "SLICE_X12Y137/D2FF site_ff_name = line[4] site, ff_name = site_ff_name.split('/') ff_type =", "open(\"design.txt\", \"r\") as f: for line in f: ''' puts", "for l in f: i, prim, loc, bel, init =", "line = line.split() tile_type = line[0] tile_name = line[1] grid_x", "ISC-style # license that can be found in the LICENSE", "in ff_name if used: segmk.add_site_tag(site, \"%s.ZINI\" % ff_name, 1 ^", "Primitive: D Flip-Flop with Clock Enable and Synchronous Set LDCE", "ff_name = site_ff_name.split('/') ff_type = line[5] used = int(line[6]) cel_prim", "or at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC ''' FDCE", "CLEM CLEM_X10Y137 30 13 SLICE_X12Y137/D2FF FF_INIT 0 ''' line =", "(C) 2020 The Project U-Ray Authors. 
# # Use of", "30 13 SLICE_X12Y137/D2FF FF_INIT 0 ''' line = line.split() tile_type", "BEL name # SLICE_X12Y137/D2FF site_ff_name = line[4] site, ff_name =", "in f: ''' puts $fp \"$type $tile $grid_x $grid_y $ff", "python3 # -*- coding: utf-8 -*- # # Copyright (C)", "LUT5 FF is2 = '2' in ff_name if used: segmk.add_site_tag(site,", "the LICENSE file or at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier:", "13 SLICE_X12Y137/D2FF FF_INIT 0 ''' line = line.split() tile_type =", "= int(line[6]) cel_prim = None cel_name = None if used:", "design.txt\") with open(\"design.txt\", \"r\") as f: for line in f:", "def vs2i(s): return {\"1'b0\": 0, \"1'b1\": 1}[s] print(\"Loading tags from", "# # Use of this source code is governed by", "''' line = line.split() tile_type = line[0] tile_name = line[1]", "is2 = '2' in ff_name if used: segmk.add_site_tag(site, \"%s.ZINI\" %", "ff_name, 1 ^ init) ''' On name: The primitives you", "Clock Enable and Synchronous Reset FDSE Primitive: D Flip-Flop with", "line[7] cel_prim = line[8] cinv = int(line[9]) init = vs2i(line[10])", "0, \"1'b1\": 1}[s] print(\"Loading tags from design.txt\") with open(\"design.txt\", \"r\")", "Flip-Flop with Clock Enable and Asynchronous Preset FDRE Primitive: D", "Authors. 
# # Use of this source code is governed", "int(line[9]) init = vs2i(line[10]) # A B C D E", "FDRE Primitive: D Flip-Flop with Clock Enable and Synchronous Reset", "in f: i, prim, loc, bel, init = l.split(\",\") i", "cel_name = None if used: cel_name = line[7] cel_prim =", "The primitives you listed have a control input to set", "name: The primitives you listed have a control input to", "f.readline() ret = {} for l in f: i, prim,", "with Clock Enable and Asynchronous Preset FDRE Primitive: D Flip-Flop", "= int(i) init = int(init) ret[loc] = (i, prim, loc,", "primitives have a control input that sets the FF value", "# Other code uses BEL name # SLICE_X12Y137/D2FF site_ff_name =", "prim, loc, bel, init) return ret top = loadtop() def", "found in the LICENSE file or at # https://opensource.org/licenses/ISC #", "listed have a control input to set the FF value", "coding: utf-8 -*- # # Copyright (C) 2020 The Project", "Primitive: D Flip-Flop with Clock Enable and Asynchronous Preset FDRE", "value to zero (clear/reset), the other three primitives have a", "the FF value to zero (clear/reset), the other three primitives", "= Segmaker(\"design.bits\", bits_per_word=16) def loadtop(): ''' i,prim,loc,bel 0,FDPE,SLICE_X12Y100,C5FF 1,FDPE,SLICE_X15Y100,A5FF 2,FDPE_1,SLICE_X16Y100,B5FF", "Enable and Asynchronous Clear FDPE Primitive: D Flip-Flop with Clock", "Primitive: Transparent Data Latch with Asynchronous Preset and Gate Enable", "CLEM_X10Y137 30 13 SLICE_X13Y137/AFF REG_INIT 1 FDRE CLEM CLEM_X10Y137 30", "tile_type = line[0] tile_name = line[1] grid_x = line[2] grid_y", "loadtop(): ''' i,prim,loc,bel 0,FDPE,SLICE_X12Y100,C5FF 1,FDPE,SLICE_X15Y100,A5FF 2,FDPE_1,SLICE_X16Y100,B5FF 3,LDCE_1,SLICE_X17Y100,BFF ''' f =", "f = open('top.txt', 'r') f.readline() ret = {} for l", "Asynchronous Preset FDRE Primitive: D Flip-Flop with Clock Enable and", "with Clock Enable and Synchronous Reset FDSE Primitive: D Flip-Flop", "'r') f.readline() ret = {} for l in f: i,", "H which = 
ff_name[0] # LUT6 vs LUT5 FF is2", "from design.txt\") with open(\"design.txt\", \"r\") as f: for line in", "1 FDRE CLEM CLEM_X10Y137 30 13 SLICE_X12Y137/D2FF FF_INIT 0 '''", "used = int(line[6]) cel_prim = None cel_name = None if", "Enable and Asynchronous Preset FDRE Primitive: D Flip-Flop with Clock", "ff_type = line[5] used = int(line[6]) cel_prim = None cel_name", "as f: for line in f: ''' puts $fp \"$type", "0 ''' line = line.split() tile_type = line[0] tile_name =", "1,FDPE,SLICE_X15Y100,A5FF 2,FDPE_1,SLICE_X16Y100,B5FF 3,LDCE_1,SLICE_X17Y100,BFF ''' f = open('top.txt', 'r') f.readline() ret", "= {} for l in f: i, prim, loc, bel,", "have a control input to set the FF value to", "to zero (clear/reset), the other three primitives have a control", "line[4] site, ff_name = site_ff_name.split('/') ff_type = line[5] used =", "int(line[6]) cel_prim = None cel_name = None if used: cel_name", "ret[loc] = (i, prim, loc, bel, init) return ret top", "Latch with Asynchronous Clear and Gate Enable LDPE Primitive: Transparent", "(i, prim, loc, bel, init) return ret top = loadtop()", "=> inversion ''' segmk.add_site_tag(site, \"%s.ZRST\" % ff_name, cel_prim in ('FDRE',", "init = int(init) ret[loc] = (i, prim, loc, bel, init)", "the other three primitives have a control input that sets", "''' f = open('top.txt', 'r') f.readline() ret = {} for", "segmk.add_site_tag(site, \"%s.ZINI\" % ff_name, 1 ^ init) ''' On name:", "Z => inversion ''' segmk.add_site_tag(site, \"%s.ZRST\" % ff_name, cel_prim in", "be found in the LICENSE file or at # https://opensource.org/licenses/ISC", "''' i,prim,loc,bel 0,FDPE,SLICE_X12Y100,C5FF 1,FDPE,SLICE_X15Y100,A5FF 2,FDPE_1,SLICE_X16Y100,B5FF 3,LDCE_1,SLICE_X17Y100,BFF ''' f = open('top.txt',", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Copyright", "the FF value to one. 
Z => inversion ''' segmk.add_site_tag(site,", "Asynchronous Clear and Gate Enable LDPE Primitive: Transparent Data Latch", "line[3] # Other code uses BEL name # SLICE_X12Y137/D2FF site_ff_name", "and Synchronous Set LDCE Primitive: Transparent Data Latch with Asynchronous", "for line in f: ''' puts $fp \"$type $tile $grid_x", "from utils.segmaker import Segmaker segmk = Segmaker(\"design.bits\", bits_per_word=16) def loadtop():", "= line.split() tile_type = line[0] tile_name = line[1] grid_x =", "ff_name[0] # LUT6 vs LUT5 FF is2 = '2' in", "''' puts $fp \"$type $tile $grid_x $grid_y $ff $bel_type $used", "Enable ''' from prims import isff, isl from utils.segmaker import", "# SPDX-License-Identifier: ISC ''' FDCE Primitive: D Flip-Flop with Clock", "with open(\"design.txt\", \"r\") as f: for line in f: '''", "U-Ray Authors. # # Use of this source code is", "prims import isff, isl from utils.segmaker import Segmaker segmk =", "def loadtop(): ''' i,prim,loc,bel 0,FDPE,SLICE_X12Y100,C5FF 1,FDPE,SLICE_X15Y100,A5FF 2,FDPE_1,SLICE_X16Y100,B5FF 3,LDCE_1,SLICE_X17Y100,BFF ''' f", "Enable and Synchronous Set LDCE Primitive: Transparent Data Latch with", "Primitive: D Flip-Flop with Clock Enable and Synchronous Reset FDSE", "value to one. 
Z => inversion ''' segmk.add_site_tag(site, \"%s.ZRST\" %", "# A B C D E F G H which", "cinv = int(line[9]) init = vs2i(line[10]) # A B C", "Transparent Data Latch with Asynchronous Preset and Gate Enable '''", "line[0] tile_name = line[1] grid_x = line[2] grid_y = line[3]", "Primitive: Transparent Data Latch with Asynchronous Clear and Gate Enable", "''' segmk.add_site_tag(site, \"%s.ZRST\" % ff_name, cel_prim in ('FDRE', 'FDCE', 'LDCE'))", "\"1'b1\": 1}[s] print(\"Loading tags from design.txt\") with open(\"design.txt\", \"r\") as", "\"r\") as f: for line in f: ''' puts $fp", "if used: cel_name = line[7] cel_prim = line[8] cinv =", "A B C D E F G H which =", "bel, init) return ret top = loadtop() def vs2i(s): return", "''' from prims import isff, isl from utils.segmaker import Segmaker", "a control input to set the FF value to zero", "from prims import isff, isl from utils.segmaker import Segmaker segmk", "= line[7] cel_prim = line[8] cinv = int(line[9]) init =", "Data Latch with Asynchronous Preset and Gate Enable ''' from", "Preset and Gate Enable ''' from prims import isff, isl", "$used $usedstr\" CLEM CLEM_X10Y137 30 13 SLICE_X13Y137/AFF REG_INIT 1 FDRE", "Flip-Flop with Clock Enable and Synchronous Reset FDSE Primitive: D", "of this source code is governed by a ISC-style #", "and Asynchronous Clear FDPE Primitive: D Flip-Flop with Clock Enable", "tile_name = line[1] grid_x = line[2] grid_y = line[3] #", "^ init) ''' On name: The primitives you listed have", "Asynchronous Clear FDPE Primitive: D Flip-Flop with Clock Enable and", "int(i) init = int(init) ret[loc] = (i, prim, loc, bel,", "$fp \"$type $tile $grid_x $grid_y $ff $bel_type $used $usedstr\" CLEM", "Asynchronous Preset and Gate Enable ''' from prims import isff,", "$tile $grid_x $grid_y $ff $bel_type $used $usedstr\" CLEM CLEM_X10Y137 30", "= (i, prim, loc, bel, init) return ret top =", "with Asynchronous Preset and Gate Enable ''' from prims import", "$usedstr\" CLEM CLEM_X10Y137 30 13 
SLICE_X13Y137/AFF REG_INIT 1 FDRE CLEM", "segmk = Segmaker(\"design.bits\", bits_per_word=16) def loadtop(): ''' i,prim,loc,bel 0,FDPE,SLICE_X12Y100,C5FF 1,FDPE,SLICE_X15Y100,A5FF", "Set LDCE Primitive: Transparent Data Latch with Asynchronous Clear and", "FF value to zero (clear/reset), the other three primitives have", "SPDX-License-Identifier: ISC ''' FDCE Primitive: D Flip-Flop with Clock Enable", "inversion ''' segmk.add_site_tag(site, \"%s.ZRST\" % ff_name, cel_prim in ('FDRE', 'FDCE',", "= None cel_name = None if used: cel_name = line[7]", "The Project U-Ray Authors. # # Use of this source", "D Flip-Flop with Clock Enable and Synchronous Reset FDSE Primitive:", "i = int(i) init = int(init) ret[loc] = (i, prim,", "2020 The Project U-Ray Authors. # # Use of this", "= line[2] grid_y = line[3] # Other code uses BEL", "= '2' in ff_name if used: segmk.add_site_tag(site, \"%s.ZINI\" % ff_name,", "3,LDCE_1,SLICE_X17Y100,BFF ''' f = open('top.txt', 'r') f.readline() ret = {}", "SLICE_X13Y137/AFF REG_INIT 1 FDRE CLEM CLEM_X10Y137 30 13 SLICE_X12Y137/D2FF FF_INIT", "= line[8] cinv = int(line[9]) init = vs2i(line[10]) # A", "Synchronous Reset FDSE Primitive: D Flip-Flop with Clock Enable and", "to one. 
Z => inversion ''' segmk.add_site_tag(site, \"%s.ZRST\" % ff_name,", "cel_name = line[7] cel_prim = line[8] cinv = int(line[9]) init", "Segmaker segmk = Segmaker(\"design.bits\", bits_per_word=16) def loadtop(): ''' i,prim,loc,bel 0,FDPE,SLICE_X12Y100,C5FF", "<filename>fuzzers/011-cle-ffconfig/generate.py #!/usr/bin/env python3 # -*- coding: utf-8 -*- # #", "= l.split(\",\") i = int(i) init = int(init) ret[loc] =", "which = ff_name[0] # LUT6 vs LUT5 FF is2 =", "LICENSE file or at # https://opensource.org/licenses/ISC # # SPDX-License-Identifier: ISC", "Clock Enable and Synchronous Set LDCE Primitive: Transparent Data Latch", "Reset FDSE Primitive: D Flip-Flop with Clock Enable and Synchronous", "FDRE CLEM CLEM_X10Y137 30 13 SLICE_X12Y137/D2FF FF_INIT 0 ''' line", "f: for line in f: ''' puts $fp \"$type $tile", "control input to set the FF value to zero (clear/reset),", "None cel_name = None if used: cel_name = line[7] cel_prim", "Clock Enable and Asynchronous Clear FDPE Primitive: D Flip-Flop with", "ret = {} for l in f: i, prim, loc,", "{\"1'b0\": 0, \"1'b1\": 1}[s] print(\"Loading tags from design.txt\") with open(\"design.txt\",", "utf-8 -*- # # Copyright (C) 2020 The Project U-Ray", "Data Latch with Asynchronous Clear and Gate Enable LDPE Primitive:", "loadtop() def vs2i(s): return {\"1'b0\": 0, \"1'b1\": 1}[s] print(\"Loading tags", "one. 
Z => inversion ''' segmk.add_site_tag(site, \"%s.ZRST\" % ff_name, cel_prim", "= vs2i(line[10]) # A B C D E F G", "= open('top.txt', 'r') f.readline() ret = {} for l in", "isl from utils.segmaker import Segmaker segmk = Segmaker(\"design.bits\", bits_per_word=16) def", "that can be found in the LICENSE file or at", "ISC ''' FDCE Primitive: D Flip-Flop with Clock Enable and", "in the LICENSE file or at # https://opensource.org/licenses/ISC # #", "Primitive: D Flip-Flop with Clock Enable and Asynchronous Clear FDPE", "three primitives have a control input that sets the FF", "= line[3] # Other code uses BEL name # SLICE_X12Y137/D2FF", "G H which = ff_name[0] # LUT6 vs LUT5 FF", "if used: segmk.add_site_tag(site, \"%s.ZINI\" % ff_name, 1 ^ init) '''", "Clear FDPE Primitive: D Flip-Flop with Clock Enable and Asynchronous", "utils.segmaker import Segmaker segmk = Segmaker(\"design.bits\", bits_per_word=16) def loadtop(): '''", "uses BEL name # SLICE_X12Y137/D2FF site_ff_name = line[4] site, ff_name", "FDSE Primitive: D Flip-Flop with Clock Enable and Synchronous Set", "FF is2 = '2' in ff_name if used: segmk.add_site_tag(site, \"%s.ZINI\"", "D Flip-Flop with Clock Enable and Asynchronous Preset FDRE Primitive:", "vs2i(line[10]) # A B C D E F G H", "init = vs2i(line[10]) # A B C D E F" ]
[ "np.ndarray self._grad_logdet_metric: np.ndarray @property def requires_update(self) -> bool: o =", "or \\ self.inv_metric is None or \\ self.jac_metric is None", "value @jac_metric.deleter def jac_metric(self): del self._jac_metric @property def grad_logdet_metric(self): return", "the Hamiltonian requires higher derivatives of the metric, which vanish", "self.grad_log_posterior = grad_log_posterior self.metric = metric self.sqrtm_metric = sqrtm_metric self.inv_metric", "def __init__(self, position: np.ndarray, momentum: np.ndarray): super().__init__(position, momentum) self._jac_metric: np.ndarray", "jac_metric, grad_logdet_metric) def clear(self): super().clear() del self.jac_metric del self.grad_logdet_metric del", "self.grad_logdet_metric is None return o @property def jac_metric(self): return self._jac_metric", "jac_metric(self, value): self._jac_metric = value @jac_metric.deleter def jac_metric(self): del self._jac_metric", "import Callable import numpy as np from hmc.integrators.states.leapfrog_state import LeapfrogState", "state uses the Fisher information matrix to provide a position-dependent", "self.metric is None or \\ self.inv_metric is None or \\", "= sqrtm_metric self.inv_metric = inv_metric self.jac_metric = jac_metric self.grad_logdet_metric =", "riemannian.force(self.velocity, grad_log_posterior, jac_metric, grad_logdet_metric) def clear(self): super().clear() del self.jac_metric del", "def grad_logdet_metric(self): del self._grad_logdet_metric def update(self, auxiliaries: Callable): num_dims =", "riemannian.velocity(inv_metric, self.momentum) self.force = riemannian.force(self.velocity, grad_log_posterior, jac_metric, grad_logdet_metric) def clear(self):", "self.inv_metric is None or \\ self.jac_metric is None or \\", "= value @jac_metric.deleter def jac_metric(self): del self._jac_metric @property def grad_logdet_metric(self):", "hmc.linalg import solve_psd class RiemannianLeapfrogState(LeapfrogState): \"\"\"The Riemannian leapfrog state 
uses", "such, computing the gradients of the Hamiltonian requires higher derivatives", "information matrix to provide a position-dependent Riemannian metric. As such,", "Riemannian metric. As such, computing the gradients of the Hamiltonian", "None or \\ self.grad_log_posterior is None or \\ self.metric is", "np.swapaxes(jac_metric, 0, -1) inv_metric, sqrtm_metric = solve_psd(metric, return_chol=True) grad_logdet_metric =", "return_chol=True) grad_logdet_metric = riemannian.grad_logdet(inv_metric, jac_metric, num_dims) self.log_posterior = log_posterior self.grad_log_posterior", "solve_psd class RiemannianLeapfrogState(LeapfrogState): \"\"\"The Riemannian leapfrog state uses the Fisher", "grad_log_posterior, jac_metric, grad_logdet_metric) def clear(self): super().clear() del self.jac_metric del self.grad_logdet_metric", "@property def jac_metric(self): return self._jac_metric @jac_metric.setter def jac_metric(self, value): self._jac_metric", "jac_metric self.grad_logdet_metric = grad_logdet_metric self.velocity = riemannian.velocity(inv_metric, self.momentum) self.force =", "def clear(self): super().clear() del self.jac_metric del self.grad_logdet_metric del self.metric del", "as np from hmc.integrators.states.leapfrog_state import LeapfrogState from hmc.integrators.fields import riemannian", "= grad_log_posterior self.metric = metric self.sqrtm_metric = sqrtm_metric self.inv_metric =", "case. 
\"\"\" def __init__(self, position: np.ndarray, momentum: np.ndarray): super().__init__(position, momentum)", "@grad_logdet_metric.setter def grad_logdet_metric(self, value): self._grad_logdet_metric = value @grad_logdet_metric.deleter def grad_logdet_metric(self):", "del self.grad_logdet_metric del self.metric del self.inv_metric del self.logdet_metric del self.sqrtm_metric", "gradients of the Hamiltonian requires higher derivatives of the metric,", "\\ self.inv_metric is None or \\ self.jac_metric is None or", "grad_logdet_metric = riemannian.grad_logdet(inv_metric, jac_metric, num_dims) self.log_posterior = log_posterior self.grad_log_posterior =", "vanish in the Euclidean case. \"\"\" def __init__(self, position: np.ndarray,", "= riemannian.force(self.velocity, grad_log_posterior, jac_metric, grad_logdet_metric) def clear(self): super().clear() del self.jac_metric", "self.grad_logdet_metric = grad_logdet_metric self.velocity = riemannian.velocity(inv_metric, self.momentum) self.force = riemannian.force(self.velocity,", "jac_metric, num_dims) self.log_posterior = log_posterior self.grad_log_posterior = grad_log_posterior self.metric =", "= log_posterior self.grad_log_posterior = grad_log_posterior self.metric = metric self.sqrtm_metric =", "del self.jac_metric del self.grad_logdet_metric del self.metric del self.inv_metric del self.logdet_metric", "@grad_logdet_metric.deleter def grad_logdet_metric(self): del self._grad_logdet_metric def update(self, auxiliaries: Callable): num_dims", "grad_logdet_metric) def clear(self): super().clear() del self.jac_metric del self.grad_logdet_metric del self.metric", "hmc.integrators.states.leapfrog_state import LeapfrogState from hmc.integrators.fields import riemannian from hmc.linalg import", "class RiemannianLeapfrogState(LeapfrogState): \"\"\"The Riemannian leapfrog state uses the Fisher information", "metric, jac_metric = auxiliaries(self.position) jac_metric = np.swapaxes(jac_metric, 0, -1) inv_metric,", 
"self.jac_metric = jac_metric self.grad_logdet_metric = grad_logdet_metric self.velocity = riemannian.velocity(inv_metric, self.momentum)", "self.momentum) self.force = riemannian.force(self.velocity, grad_log_posterior, jac_metric, grad_logdet_metric) def clear(self): super().clear()", "= np.swapaxes(jac_metric, 0, -1) inv_metric, sqrtm_metric = solve_psd(metric, return_chol=True) grad_logdet_metric", "__init__(self, position: np.ndarray, momentum: np.ndarray): super().__init__(position, momentum) self._jac_metric: np.ndarray self._grad_logdet_metric:", "from typing import Callable import numpy as np from hmc.integrators.states.leapfrog_state", "inv_metric, sqrtm_metric = solve_psd(metric, return_chol=True) grad_logdet_metric = riemannian.grad_logdet(inv_metric, jac_metric, num_dims)", "import riemannian from hmc.linalg import solve_psd class RiemannianLeapfrogState(LeapfrogState): \"\"\"The Riemannian", "or \\ self.jac_metric is None or \\ self.grad_logdet_metric is None", "np.ndarray): super().__init__(position, momentum) self._jac_metric: np.ndarray self._grad_logdet_metric: np.ndarray @property def requires_update(self)", "is None return o @property def jac_metric(self): return self._jac_metric @jac_metric.setter", "position-dependent Riemannian metric. As such, computing the gradients of the", "metric. As such, computing the gradients of the Hamiltonian requires", "super().clear() del self.jac_metric del self.grad_logdet_metric del self.metric del self.inv_metric del", "of the Hamiltonian requires higher derivatives of the metric, which", "from hmc.linalg import solve_psd class RiemannianLeapfrogState(LeapfrogState): \"\"\"The Riemannian leapfrog state", "jac_metric = np.swapaxes(jac_metric, 0, -1) inv_metric, sqrtm_metric = solve_psd(metric, return_chol=True)", "the Euclidean case. 
\"\"\" def __init__(self, position: np.ndarray, momentum: np.ndarray):", "np from hmc.integrators.states.leapfrog_state import LeapfrogState from hmc.integrators.fields import riemannian from", "from hmc.integrators.fields import riemannian from hmc.linalg import solve_psd class RiemannianLeapfrogState(LeapfrogState):", "@property def grad_logdet_metric(self): return self._grad_logdet_metric @grad_logdet_metric.setter def grad_logdet_metric(self, value): self._grad_logdet_metric", "= grad_logdet_metric self.velocity = riemannian.velocity(inv_metric, self.momentum) self.force = riemannian.force(self.velocity, grad_log_posterior,", "metric, which vanish in the Euclidean case. \"\"\" def __init__(self,", "grad_logdet_metric(self): del self._grad_logdet_metric def update(self, auxiliaries: Callable): num_dims = len(self.position)", "Riemannian leapfrog state uses the Fisher information matrix to provide", "jac_metric(self): return self._jac_metric @jac_metric.setter def jac_metric(self, value): self._jac_metric = value", "= riemannian.velocity(inv_metric, self.momentum) self.force = riemannian.force(self.velocity, grad_log_posterior, jac_metric, grad_logdet_metric) def", "import LeapfrogState from hmc.integrators.fields import riemannian from hmc.linalg import solve_psd", "value): self._grad_logdet_metric = value @grad_logdet_metric.deleter def grad_logdet_metric(self): del self._grad_logdet_metric def", "self.metric = metric self.sqrtm_metric = sqrtm_metric self.inv_metric = inv_metric self.jac_metric", "clear(self): super().clear() del self.jac_metric del self.grad_logdet_metric del self.metric del self.inv_metric", "def jac_metric(self, value): self._jac_metric = value @jac_metric.deleter def jac_metric(self): del", "None or \\ self.jac_metric is None or \\ self.grad_logdet_metric is", "self.jac_metric del self.grad_logdet_metric del self.metric del self.inv_metric del self.logdet_metric del", "from hmc.integrators.states.leapfrog_state import LeapfrogState from 
hmc.integrators.fields import riemannian from hmc.linalg", "self.velocity = riemannian.velocity(inv_metric, self.momentum) self.force = riemannian.force(self.velocity, grad_log_posterior, jac_metric, grad_logdet_metric)", "import solve_psd class RiemannianLeapfrogState(LeapfrogState): \"\"\"The Riemannian leapfrog state uses the", "which vanish in the Euclidean case. \"\"\" def __init__(self, position:", "in the Euclidean case. \"\"\" def __init__(self, position: np.ndarray, momentum:", "self._jac_metric @property def grad_logdet_metric(self): return self._grad_logdet_metric @grad_logdet_metric.setter def grad_logdet_metric(self, value):", "def requires_update(self) -> bool: o = self.log_posterior is None or", "= value @grad_logdet_metric.deleter def grad_logdet_metric(self): del self._grad_logdet_metric def update(self, auxiliaries:", "\\ self.grad_logdet_metric is None return o @property def jac_metric(self): return", "riemannian.grad_logdet(inv_metric, jac_metric, num_dims) self.log_posterior = log_posterior self.grad_log_posterior = grad_log_posterior self.metric", "sqrtm_metric self.inv_metric = inv_metric self.jac_metric = jac_metric self.grad_logdet_metric = grad_logdet_metric", "self.grad_log_posterior is None or \\ self.metric is None or \\", "value @grad_logdet_metric.deleter def grad_logdet_metric(self): del self._grad_logdet_metric def update(self, auxiliaries: Callable):", "return o @property def jac_metric(self): return self._jac_metric @jac_metric.setter def jac_metric(self,", "None or \\ self.metric is None or \\ self.inv_metric is", "= self.log_posterior is None or \\ self.grad_log_posterior is None or", "is None or \\ self.inv_metric is None or \\ self.jac_metric", "As such, computing the gradients of the Hamiltonian requires higher", "grad_logdet_metric(self, value): self._grad_logdet_metric = value @grad_logdet_metric.deleter def grad_logdet_metric(self): del self._grad_logdet_metric", "= riemannian.grad_logdet(inv_metric, jac_metric, num_dims) 
self.log_posterior = log_posterior self.grad_log_posterior = grad_log_posterior", "position: np.ndarray, momentum: np.ndarray): super().__init__(position, momentum) self._jac_metric: np.ndarray self._grad_logdet_metric: np.ndarray", "@jac_metric.setter def jac_metric(self, value): self._jac_metric = value @jac_metric.deleter def jac_metric(self):", "matrix to provide a position-dependent Riemannian metric. As such, computing", "@jac_metric.deleter def jac_metric(self): del self._jac_metric @property def grad_logdet_metric(self): return self._grad_logdet_metric", "higher derivatives of the metric, which vanish in the Euclidean", "return self._grad_logdet_metric @grad_logdet_metric.setter def grad_logdet_metric(self, value): self._grad_logdet_metric = value @grad_logdet_metric.deleter", "is None or \\ self.metric is None or \\ self.inv_metric", "def update(self, auxiliaries: Callable): num_dims = len(self.position) log_posterior, grad_log_posterior, metric,", "of the metric, which vanish in the Euclidean case. \"\"\"", "= solve_psd(metric, return_chol=True) grad_logdet_metric = riemannian.grad_logdet(inv_metric, jac_metric, num_dims) self.log_posterior =", "grad_logdet_metric self.velocity = riemannian.velocity(inv_metric, self.momentum) self.force = riemannian.force(self.velocity, grad_log_posterior, jac_metric,", "is None or \\ self.jac_metric is None or \\ self.grad_logdet_metric", "the Fisher information matrix to provide a position-dependent Riemannian metric.", "\"\"\"The Riemannian leapfrog state uses the Fisher information matrix to", "\"\"\" def __init__(self, position: np.ndarray, momentum: np.ndarray): super().__init__(position, momentum) self._jac_metric:", "provide a position-dependent Riemannian metric. 
As such, computing the gradients", "or \\ self.metric is None or \\ self.inv_metric is None", "o @property def jac_metric(self): return self._jac_metric @jac_metric.setter def jac_metric(self, value):", "computing the gradients of the Hamiltonian requires higher derivatives of", "grad_log_posterior self.metric = metric self.sqrtm_metric = sqrtm_metric self.inv_metric = inv_metric", "LeapfrogState from hmc.integrators.fields import riemannian from hmc.linalg import solve_psd class", "\\ self.metric is None or \\ self.inv_metric is None or", "-> bool: o = self.log_posterior is None or \\ self.grad_log_posterior", "@property def requires_update(self) -> bool: o = self.log_posterior is None", "self._jac_metric = value @jac_metric.deleter def jac_metric(self): del self._jac_metric @property def", "requires_update(self) -> bool: o = self.log_posterior is None or \\", "None return o @property def jac_metric(self): return self._jac_metric @jac_metric.setter def", "num_dims) self.log_posterior = log_posterior self.grad_log_posterior = grad_log_posterior self.metric = metric", "num_dims = len(self.position) log_posterior, grad_log_posterior, metric, jac_metric = auxiliaries(self.position) jac_metric", "import numpy as np from hmc.integrators.states.leapfrog_state import LeapfrogState from hmc.integrators.fields", "Callable): num_dims = len(self.position) log_posterior, grad_log_posterior, metric, jac_metric = auxiliaries(self.position)", "np.ndarray, momentum: np.ndarray): super().__init__(position, momentum) self._jac_metric: np.ndarray self._grad_logdet_metric: np.ndarray @property", "solve_psd(metric, return_chol=True) grad_logdet_metric = riemannian.grad_logdet(inv_metric, jac_metric, num_dims) self.log_posterior = log_posterior", "\\ self.grad_log_posterior is None or \\ self.metric is None or", "def jac_metric(self): return self._jac_metric @jac_metric.setter def jac_metric(self, value): self._jac_metric =", "value): self._jac_metric = value @jac_metric.deleter def 
jac_metric(self): del self._jac_metric @property", "log_posterior, grad_log_posterior, metric, jac_metric = auxiliaries(self.position) jac_metric = np.swapaxes(jac_metric, 0,", "del self._grad_logdet_metric def update(self, auxiliaries: Callable): num_dims = len(self.position) log_posterior,", "riemannian from hmc.linalg import solve_psd class RiemannianLeapfrogState(LeapfrogState): \"\"\"The Riemannian leapfrog", "leapfrog state uses the Fisher information matrix to provide a", "del self._jac_metric @property def grad_logdet_metric(self): return self._grad_logdet_metric @grad_logdet_metric.setter def grad_logdet_metric(self,", "0, -1) inv_metric, sqrtm_metric = solve_psd(metric, return_chol=True) grad_logdet_metric = riemannian.grad_logdet(inv_metric,", "jac_metric = auxiliaries(self.position) jac_metric = np.swapaxes(jac_metric, 0, -1) inv_metric, sqrtm_metric", "self._grad_logdet_metric @grad_logdet_metric.setter def grad_logdet_metric(self, value): self._grad_logdet_metric = value @grad_logdet_metric.deleter def", "inv_metric self.jac_metric = jac_metric self.grad_logdet_metric = grad_logdet_metric self.velocity = riemannian.velocity(inv_metric,", "self._jac_metric @jac_metric.setter def jac_metric(self, value): self._jac_metric = value @jac_metric.deleter def", "self.force = riemannian.force(self.velocity, grad_log_posterior, jac_metric, grad_logdet_metric) def clear(self): super().clear() del", "Callable import numpy as np from hmc.integrators.states.leapfrog_state import LeapfrogState from", "bool: o = self.log_posterior is None or \\ self.grad_log_posterior is", "grad_log_posterior, metric, jac_metric = auxiliaries(self.position) jac_metric = np.swapaxes(jac_metric, 0, -1)", "numpy as np from hmc.integrators.states.leapfrog_state import LeapfrogState from hmc.integrators.fields import", "\\ self.jac_metric is None or \\ self.grad_logdet_metric is None return", "def grad_logdet_metric(self, value): self._grad_logdet_metric = value @grad_logdet_metric.deleter 
def grad_logdet_metric(self): del", "self._grad_logdet_metric: np.ndarray @property def requires_update(self) -> bool: o = self.log_posterior", "is None or \\ self.grad_log_posterior is None or \\ self.metric", "def grad_logdet_metric(self): return self._grad_logdet_metric @grad_logdet_metric.setter def grad_logdet_metric(self, value): self._grad_logdet_metric =", "= len(self.position) log_posterior, grad_log_posterior, metric, jac_metric = auxiliaries(self.position) jac_metric =", "self._jac_metric: np.ndarray self._grad_logdet_metric: np.ndarray @property def requires_update(self) -> bool: o", "metric self.sqrtm_metric = sqrtm_metric self.inv_metric = inv_metric self.jac_metric = jac_metric", "jac_metric(self): del self._jac_metric @property def grad_logdet_metric(self): return self._grad_logdet_metric @grad_logdet_metric.setter def", "grad_logdet_metric(self): return self._grad_logdet_metric @grad_logdet_metric.setter def grad_logdet_metric(self, value): self._grad_logdet_metric = value", "momentum) self._jac_metric: np.ndarray self._grad_logdet_metric: np.ndarray @property def requires_update(self) -> bool:", "= auxiliaries(self.position) jac_metric = np.swapaxes(jac_metric, 0, -1) inv_metric, sqrtm_metric =", "requires higher derivatives of the metric, which vanish in the", "momentum: np.ndarray): super().__init__(position, momentum) self._jac_metric: np.ndarray self._grad_logdet_metric: np.ndarray @property def", "self.log_posterior is None or \\ self.grad_log_posterior is None or \\", "return self._jac_metric @jac_metric.setter def jac_metric(self, value): self._jac_metric = value @jac_metric.deleter", "len(self.position) log_posterior, grad_log_posterior, metric, jac_metric = auxiliaries(self.position) jac_metric = np.swapaxes(jac_metric,", "typing import Callable import numpy as np from hmc.integrators.states.leapfrog_state import", "auxiliaries: Callable): num_dims = len(self.position) log_posterior, grad_log_posterior, metric, jac_metric =", 
"log_posterior self.grad_log_posterior = grad_log_posterior self.metric = metric self.sqrtm_metric = sqrtm_metric", "self.inv_metric = inv_metric self.jac_metric = jac_metric self.grad_logdet_metric = grad_logdet_metric self.velocity", "derivatives of the metric, which vanish in the Euclidean case.", "= metric self.sqrtm_metric = sqrtm_metric self.inv_metric = inv_metric self.jac_metric =", "the metric, which vanish in the Euclidean case. \"\"\" def", "def jac_metric(self): del self._jac_metric @property def grad_logdet_metric(self): return self._grad_logdet_metric @grad_logdet_metric.setter", "self._grad_logdet_metric def update(self, auxiliaries: Callable): num_dims = len(self.position) log_posterior, grad_log_posterior,", "o = self.log_posterior is None or \\ self.grad_log_posterior is None", "np.ndarray @property def requires_update(self) -> bool: o = self.log_posterior is", "None or \\ self.grad_logdet_metric is None return o @property def", "RiemannianLeapfrogState(LeapfrogState): \"\"\"The Riemannian leapfrog state uses the Fisher information matrix", "super().__init__(position, momentum) self._jac_metric: np.ndarray self._grad_logdet_metric: np.ndarray @property def requires_update(self) ->", "Euclidean case. \"\"\" def __init__(self, position: np.ndarray, momentum: np.ndarray): super().__init__(position,", "hmc.integrators.fields import riemannian from hmc.linalg import solve_psd class RiemannianLeapfrogState(LeapfrogState): \"\"\"The", "self.jac_metric is None or \\ self.grad_logdet_metric is None return o", "is None or \\ self.grad_logdet_metric is None return o @property", "self._grad_logdet_metric = value @grad_logdet_metric.deleter def grad_logdet_metric(self): del self._grad_logdet_metric def update(self,", "= jac_metric self.grad_logdet_metric = grad_logdet_metric self.velocity = riemannian.velocity(inv_metric, self.momentum) self.force", "a position-dependent Riemannian metric. 
As such, computing the gradients of", "sqrtm_metric = solve_psd(metric, return_chol=True) grad_logdet_metric = riemannian.grad_logdet(inv_metric, jac_metric, num_dims) self.log_posterior", "Fisher information matrix to provide a position-dependent Riemannian metric. As", "uses the Fisher information matrix to provide a position-dependent Riemannian", "None or \\ self.inv_metric is None or \\ self.jac_metric is", "to provide a position-dependent Riemannian metric. As such, computing the", "or \\ self.grad_logdet_metric is None return o @property def jac_metric(self):", "-1) inv_metric, sqrtm_metric = solve_psd(metric, return_chol=True) grad_logdet_metric = riemannian.grad_logdet(inv_metric, jac_metric,", "the gradients of the Hamiltonian requires higher derivatives of the", "update(self, auxiliaries: Callable): num_dims = len(self.position) log_posterior, grad_log_posterior, metric, jac_metric", "auxiliaries(self.position) jac_metric = np.swapaxes(jac_metric, 0, -1) inv_metric, sqrtm_metric = solve_psd(metric,", "or \\ self.grad_log_posterior is None or \\ self.metric is None", "= inv_metric self.jac_metric = jac_metric self.grad_logdet_metric = grad_logdet_metric self.velocity =", "self.sqrtm_metric = sqrtm_metric self.inv_metric = inv_metric self.jac_metric = jac_metric self.grad_logdet_metric", "self.log_posterior = log_posterior self.grad_log_posterior = grad_log_posterior self.metric = metric self.sqrtm_metric", "Hamiltonian requires higher derivatives of the metric, which vanish in" ]
[ "= [] cf = Crazyflie(rw_cache='./cache') with SyncCrazyflie(URI, cf=cf) as scf:", "(don't list the debug drivers) cflib.crtp.init_drivers(enable_debug_driver=False) rangeArray = [] cf", "2.0 around using your hands while it's hovering. This examples", "# This program is distributed in the hope that it", "by holding your hand above the Crazyflie. For the example", "if len(sys.argv) > 1: URI = sys.argv[1] # Only output", "# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #", "/ __ )(_) /_______________ _____ ___ # | 0xBC |", "\"push\" the Crazyflie 2.0 around using your hands while it's", "needed: * Crazyflie 2.0 * Crazyradio PA * Flow deck", "comes closer than 0.2m by setting a velocity in the", "WITHOUT ANY WARRANTY; without even the implied warranty of #", "51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA.", "Python Library # # This program is free software; you", "__ / / __/ ___/ ___/ __ `/_ / /", "by setting a velocity in the opposite direction. The demo", "__ # +------+ / __ )(_) /_______________ _____ ___ #", "/ / /_/ /__/ / / /_/ / / /_/", "/ / /_/ / / /_/ __/ # || ||", "Ctrl-C or by holding your hand above the Crazyflie. For", "is None: return False else: return range < MIN_DISTANCE if", "warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.", "import MotionCommander from cflib.utils.multiranger import Multiranger import matplotlib.pyplot as plt", "| / __ / / __/ ___/ ___/ __ `/_", "/_/ __/ # || || /_____/_/\\__/\\___/_/ \\__,_/ /___/\\___/ # #", "import cflib.crtp from cflib.crazyflie import Crazyflie from cflib.crazyflie.syncCrazyflie import SyncCrazyflie", "version. # # This program is distributed in the hope", "MA 02110-1301, USA. 
\"\"\" Example scipts that allows a user", "== '__main__': # Initialize the low-level drivers (don't list the", "# Copyright (C) 2017 Bitcraze AB # # Crazyflie Python", "you can redistribute it and/or # modify it under the", "# of the License, or (at your option) any later", "in all directions and tries to keep away from anything", "Floor, Boston, # MA 02110-1301, USA. \"\"\" Example scipts that", "closer than 0.2m by setting a velocity in the opposite", "sys.argv[1] # Only output errors from the logging framework logging.basicConfig(level=logging.ERROR)", "\"\"\" import logging import sys import time import cflib.crtp from", "* Crazyflie 2.0 * Crazyradio PA * Flow deck *", "write to the Free Software # Foundation, Inc., 51 Franklin", "by either pressing Ctrl-C or by holding your hand above", "matplotlib.patches as patches URI = 'radio://0/80/2M' if len(sys.argv) > 1:", "0xBC | / __ / / __/ ___/ ___/ __", "program is free software; you can redistribute it and/or #", "0.2 # m if range is None: return False else:", "___/ ___/ __ `/_ / / _ \\ # +------+", "# # This program is free software; you can redistribute", "Crazyflie 2.0 around using your hands while it's hovering. This", "A PARTICULAR PURPOSE. See the # GNU General Public License", "__ `/_ / / _ \\ # +------+ / /_/", "Crazyflie(rw_cache='./cache') with SyncCrazyflie(URI, cf=cf) as scf: with MotionCommander(scf) as motion_commander:", "* Multiranger deck \"\"\" import logging import sys import time", "as plt from matplotlib.pyplot import figure import matplotlib.patches as patches", "even the implied warranty of # MERCHANTABILITY or FITNESS FOR", "Boston, # MA 02110-1301, USA. \"\"\" Example scipts that allows", "and tries to keep away from anything that comes closer", "0.2m by setting a velocity in the opposite direction. The", "* Flow deck * Multiranger deck \"\"\" import logging import", "or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU", "the implied warranty of # MERCHANTABILITY or FITNESS FOR A", "option) any later version. # # This program is distributed", "low-level drivers (don't list the debug drivers) cflib.crtp.init_drivers(enable_debug_driver=False) rangeArray =", "# # Crazyflie Python Library # # This program is", "2017 Bitcraze AB # # Crazyflie Python Library # #", "the GNU General Public License # as published by the", "/___/\\___/ # # Copyright (C) 2017 Bitcraze AB # #", "Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor,", "to run the following hardware is needed: * Crazyflie 2.0", "as motion_commander: with Multiranger(scf) as multiranger: motion_commander.start_turn_left(90) rangeArray.append(multiranger.front) time.sleep(0.05) plt.plot(rangeArray)", "from matplotlib.pyplot import figure import matplotlib.patches as patches URI =", "return False else: return range < MIN_DISTANCE if __name__ ==", "1: URI = sys.argv[1] # Only output errors from the", "cflib.utils.multiranger import Multiranger import matplotlib.pyplot as plt from matplotlib.pyplot import", "= sys.argv[1] # Only output errors from the logging framework", "___/ __ `/_ / / _ \\ # +------+ /", "if range is None: return False else: return range <", "framework logging.basicConfig(level=logging.ERROR) def is_close(range): MIN_DISTANCE = 0.2 # m if", "figure import matplotlib.patches as patches URI = 'radio://0/80/2M' if len(sys.argv)", "/ /_/ __/ # || || /_____/_/\\__/\\___/_/ \\__,_/ /___/\\___/ #", "len(sys.argv) > 1: URI = sys.argv[1] # Only output errors", "___ # | 0xBC | / __ / / __/", "For the example to run the following hardware is needed:", "decks to measure distances in all directions and tries to", "PARTICULAR PURPOSE. 
See the # GNU General Public License for", "# as published by the Free Software Foundation; either version", "= 'radio://0/80/2M' if len(sys.argv) > 1: URI = sys.argv[1] #", "Software Foundation; either version 2 # of the License, or", "program; if not, write to the Free Software # Foundation,", "matplotlib.pyplot import figure import matplotlib.patches as patches URI = 'radio://0/80/2M'", "__/ # || || /_____/_/\\__/\\___/_/ \\__,_/ /___/\\___/ # # Copyright", "2 # of the License, or (at your option) any", "SyncCrazyflie from cflib.positioning.motion_commander import MotionCommander from cflib.utils.multiranger import Multiranger import", "your hand above the Crazyflie. For the example to run", "This program is free software; you can redistribute it and/or", "while it's hovering. This examples uses the Flow and Multi-ranger", "Example scipts that allows a user to \"push\" the Crazyflie", "utf-8 -*- # # || ____ _ __ # +------+", "/ / /_/ __/ # || || /_____/_/\\__/\\___/_/ \\__,_/ /___/\\___/", "the following hardware is needed: * Crazyflie 2.0 * Crazyradio", "import figure import matplotlib.patches as patches URI = 'radio://0/80/2M' if", "any later version. # # This program is distributed in", "scf: with MotionCommander(scf) as motion_commander: with Multiranger(scf) as multiranger: motion_commander.start_turn_left(90)", "`/_ / / _ \\ # +------+ / /_/ /", "modify it under the terms of the GNU General Public", "MotionCommander(scf) as motion_commander: with Multiranger(scf) as multiranger: motion_commander.start_turn_left(90) rangeArray.append(multiranger.front) time.sleep(0.05)", "General Public License # as published by the Free Software", "is ended by either pressing Ctrl-C or by holding your", "General Public License for more details. 
# You should have", "received a copy of the GNU General Public License #", "# Only output errors from the logging framework logging.basicConfig(level=logging.ERROR) def", "2.0 * Crazyradio PA * Flow deck * Multiranger deck", "to the Free Software # Foundation, Inc., 51 Franklin Street,", "time import cflib.crtp from cflib.crazyflie import Crazyflie from cflib.crazyflie.syncCrazyflie import", "velocity in the opposite direction. The demo is ended by", "See the # GNU General Public License for more details.", "coding: utf-8 -*- # # || ____ _ __ #", "Copyright (C) 2017 Bitcraze AB # # Crazyflie Python Library", "Flow deck * Multiranger deck \"\"\" import logging import sys", "'__main__': # Initialize the low-level drivers (don't list the debug", "# Crazyflie Python Library # # This program is free", "cflib.crazyflie.syncCrazyflie import SyncCrazyflie from cflib.positioning.motion_commander import MotionCommander from cflib.utils.multiranger import", "it and/or # modify it under the terms of the", "that it will be useful, # but WITHOUT ANY WARRANTY;", "debug drivers) cflib.crtp.init_drivers(enable_debug_driver=False) rangeArray = [] cf = Crazyflie(rw_cache='./cache') with", "keep away from anything that comes closer than 0.2m by", "pressing Ctrl-C or by holding your hand above the Crazyflie.", "your hands while it's hovering. This examples uses the Flow", "the low-level drivers (don't list the debug drivers) cflib.crtp.init_drivers(enable_debug_driver=False) rangeArray", "hovering. This examples uses the Flow and Multi-ranger decks to", "direction. 
The demo is ended by either pressing Ctrl-C or", "Library # # This program is free software; you can", "____ _ __ # +------+ / __ )(_) /_______________ _____", "away from anything that comes closer than 0.2m by setting", "be useful, # but WITHOUT ANY WARRANTY; without even the", "Free Software Foundation; either version 2 # of the License,", "and/or # modify it under the terms of the GNU", "/__/ / / /_/ / / /_/ __/ # ||", "/ __/ ___/ ___/ __ `/_ / / _ \\", "return range < MIN_DISTANCE if __name__ == '__main__': # Initialize", "it's hovering. This examples uses the Flow and Multi-ranger decks", "Bitcraze AB # # Crazyflie Python Library # # This", "from cflib.crazyflie import Crazyflie from cflib.crazyflie.syncCrazyflie import SyncCrazyflie from cflib.positioning.motion_commander", "examples uses the Flow and Multi-ranger decks to measure distances", "opposite direction. The demo is ended by either pressing Ctrl-C", "output errors from the logging framework logging.basicConfig(level=logging.ERROR) def is_close(range): MIN_DISTANCE", "either version 2 # of the License, or (at your", "that allows a user to \"push\" the Crazyflie 2.0 around", "m if range is None: return False else: return range", "# +------+ / /_/ / / /_/ /__/ / /", "URI = sys.argv[1] # Only output errors from the logging", "software; you can redistribute it and/or # modify it under", "Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA", "The demo is ended by either pressing Ctrl-C or by", "# # This program is distributed in the hope that", "This examples uses the Flow and Multi-ranger decks to measure", "/_/ / / /_/ /__/ / / /_/ / /", "user to \"push\" the Crazyflie 2.0 around using your hands", "Initialize the low-level drivers (don't list the debug drivers) cflib.crtp.init_drivers(enable_debug_driver=False)", "cflib.crtp.init_drivers(enable_debug_driver=False) rangeArray = [] cf = Crazyflie(rw_cache='./cache') with SyncCrazyflie(URI, cf=cf)", "License for more details. 
# You should have received a", "/_____/_/\\__/\\___/_/ \\__,_/ /___/\\___/ # # Copyright (C) 2017 Bitcraze AB", "# Initialize the low-level drivers (don't list the debug drivers)", "or (at your option) any later version. # # This", "# but WITHOUT ANY WARRANTY; without even the implied warranty", "This program is distributed in the hope that it will", "implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR", "Fifth Floor, Boston, # MA 02110-1301, USA. \"\"\" Example scipts", "/ _ \\ # +------+ / /_/ / / /_/", "run the following hardware is needed: * Crazyflie 2.0 *", "either pressing Ctrl-C or by holding your hand above the", "cflib.positioning.motion_commander import MotionCommander from cflib.utils.multiranger import Multiranger import matplotlib.pyplot as", "directions and tries to keep away from anything that comes", "sys import time import cflib.crtp from cflib.crazyflie import Crazyflie from", "_____ ___ # | 0xBC | / __ / /", "setting a velocity in the opposite direction. The demo is", "\"\"\" Example scipts that allows a user to \"push\" the", "to measure distances in all directions and tries to keep", "the Free Software Foundation; either version 2 # of the", "it will be useful, # but WITHOUT ANY WARRANTY; without", "# GNU General Public License for more details. 
# You", "License # along with this program; if not, write to", "the example to run the following hardware is needed: *", "URI = 'radio://0/80/2M' if len(sys.argv) > 1: URI = sys.argv[1]", "= Crazyflie(rw_cache='./cache') with SyncCrazyflie(URI, cf=cf) as scf: with MotionCommander(scf) as", "with MotionCommander(scf) as motion_commander: with Multiranger(scf) as multiranger: motion_commander.start_turn_left(90) rangeArray.append(multiranger.front)", "redistribute it and/or # modify it under the terms of", "# || || /_____/_/\\__/\\___/_/ \\__,_/ /___/\\___/ # # Copyright (C)", "AB # # Crazyflie Python Library # # This program", "hope that it will be useful, # but WITHOUT ANY", "it under the terms of the GNU General Public License", "Flow and Multi-ranger decks to measure distances in all directions", "plt from matplotlib.pyplot import figure import matplotlib.patches as patches URI", "'radio://0/80/2M' if len(sys.argv) > 1: URI = sys.argv[1] # Only", "program is distributed in the hope that it will be", "|| /_____/_/\\__/\\___/_/ \\__,_/ /___/\\___/ # # Copyright (C) 2017 Bitcraze", "< MIN_DISTANCE if __name__ == '__main__': # Initialize the low-level", "the License, or (at your option) any later version. #", "__name__ == '__main__': # Initialize the low-level drivers (don't list", "Crazyflie 2.0 * Crazyradio PA * Flow deck * Multiranger", "FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General", "a user to \"push\" the Crazyflie 2.0 around using your", "errors from the logging framework logging.basicConfig(level=logging.ERROR) def is_close(range): MIN_DISTANCE =", "in the opposite direction. 
The demo is ended by either", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # ||", "import matplotlib.patches as patches URI = 'radio://0/80/2M' if len(sys.argv) >", "<gh_stars>0 #!/usr/bin/env python3 # -*- coding: utf-8 -*- # #", "General Public License # along with this program; if not,", "will be useful, # but WITHOUT ANY WARRANTY; without even", "else: return range < MIN_DISTANCE if __name__ == '__main__': #", "-*- # # || ____ _ __ # +------+ /", "if not, write to the Free Software # Foundation, Inc.,", "# +------+ / __ )(_) /_______________ _____ ___ # |", "USA. \"\"\" Example scipts that allows a user to \"push\"", "to keep away from anything that comes closer than 0.2m", "above the Crazyflie. For the example to run the following", "all directions and tries to keep away from anything that", "PURPOSE. See the # GNU General Public License for more", "FOR A PARTICULAR PURPOSE. See the # GNU General Public", "the # GNU General Public License for more details. #", "Crazyflie Python Library # # This program is free software;", "/ / _ \\ # +------+ / /_/ / /", "version 2 # of the License, or (at your option)", "example to run the following hardware is needed: * Crazyflie", "uses the Flow and Multi-ranger decks to measure distances in", "Multiranger import matplotlib.pyplot as plt from matplotlib.pyplot import figure import", "the logging framework logging.basicConfig(level=logging.ERROR) def is_close(range): MIN_DISTANCE = 0.2 #", "logging import sys import time import cflib.crtp from cflib.crazyflie import", "+------+ / /_/ / / /_/ /__/ / / /_/", "__/ ___/ ___/ __ `/_ / / _ \\ #", "Only output errors from the logging framework logging.basicConfig(level=logging.ERROR) def is_close(range):", "free software; you can redistribute it and/or # modify it", "your option) any later version. # # This program is", "# || ____ _ __ # +------+ / __ )(_)", "a velocity in the opposite direction. 
The demo is ended", "is free software; you can redistribute it and/or # modify", "Multi-ranger decks to measure distances in all directions and tries", ")(_) /_______________ _____ ___ # | 0xBC | / __", "along with this program; if not, write to the Free", "\\ # +------+ / /_/ / / /_/ /__/ /", "from cflib.crazyflie.syncCrazyflie import SyncCrazyflie from cflib.positioning.motion_commander import MotionCommander from cflib.utils.multiranger", "= 0.2 # m if range is None: return False", "ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY", "(at your option) any later version. # # This program", "cflib.crazyflie import Crazyflie from cflib.crazyflie.syncCrazyflie import SyncCrazyflie from cflib.positioning.motion_commander import", "the opposite direction. The demo is ended by either pressing", "GNU General Public License for more details. # You should", "License, or (at your option) any later version. # #", "the hope that it will be useful, # but WITHOUT", "measure distances in all directions and tries to keep away", "import time import cflib.crtp from cflib.crazyflie import Crazyflie from cflib.crazyflie.syncCrazyflie", "to \"push\" the Crazyflie 2.0 around using your hands while", "Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301,", "for more details. # You should have received a copy", "than 0.2m by setting a velocity in the opposite direction.", "|| || /_____/_/\\__/\\___/_/ \\__,_/ /___/\\___/ # # Copyright (C) 2017", "_ __ # +------+ / __ )(_) /_______________ _____ ___", "if __name__ == '__main__': # Initialize the low-level drivers (don't", "import matplotlib.pyplot as plt from matplotlib.pyplot import figure import matplotlib.patches", "hand above the Crazyflie. For the example to run the", "as patches URI = 'radio://0/80/2M' if len(sys.argv) > 1: URI", "around using your hands while it's hovering. 
This examples uses", "hardware is needed: * Crazyflie 2.0 * Crazyradio PA *", "Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,", "copy of the GNU General Public License # along with", "is_close(range): MIN_DISTANCE = 0.2 # m if range is None:", "cf=cf) as scf: with MotionCommander(scf) as motion_commander: with Multiranger(scf) as", "def is_close(range): MIN_DISTANCE = 0.2 # m if range is", "distributed in the hope that it will be useful, #", "demo is ended by either pressing Ctrl-C or by holding", "logging framework logging.basicConfig(level=logging.ERROR) def is_close(range): MIN_DISTANCE = 0.2 # m", "patches URI = 'radio://0/80/2M' if len(sys.argv) > 1: URI =", "/ /_/ / / /_/ /__/ / / /_/ /", "of the License, or (at your option) any later version.", "from cflib.utils.multiranger import Multiranger import matplotlib.pyplot as plt from matplotlib.pyplot", "You should have received a copy of the GNU General", "with SyncCrazyflie(URI, cf=cf) as scf: with MotionCommander(scf) as motion_commander: with", "Street, Fifth Floor, Boston, # MA 02110-1301, USA. \"\"\" Example", "the debug drivers) cflib.crtp.init_drivers(enable_debug_driver=False) rangeArray = [] cf = Crazyflie(rw_cache='./cache')", "import sys import time import cflib.crtp from cflib.crazyflie import Crazyflie", "# | 0xBC | / __ / / __/ ___/", "useful, # but WITHOUT ANY WARRANTY; without even the implied", "anything that comes closer than 0.2m by setting a velocity", "import SyncCrazyflie from cflib.positioning.motion_commander import MotionCommander from cflib.utils.multiranger import Multiranger", "Public License for more details. # You should have received", "allows a user to \"push\" the Crazyflie 2.0 around using", "later version. 
# # This program is distributed in the", "drivers (don't list the debug drivers) cflib.crtp.init_drivers(enable_debug_driver=False) rangeArray = []", "under the terms of the GNU General Public License #", "matplotlib.pyplot as plt from matplotlib.pyplot import figure import matplotlib.patches as", "Multiranger deck \"\"\" import logging import sys import time import", "# You should have received a copy of the GNU", "deck * Multiranger deck \"\"\" import logging import sys import", "the terms of the GNU General Public License # as", "/ / __/ ___/ ___/ __ `/_ / / _", "more details. # You should have received a copy of", "cflib.crtp from cflib.crazyflie import Crazyflie from cflib.crazyflie.syncCrazyflie import SyncCrazyflie from", "MIN_DISTANCE if __name__ == '__main__': # Initialize the low-level drivers", "the Free Software # Foundation, Inc., 51 Franklin Street, Fifth", "list the debug drivers) cflib.crtp.init_drivers(enable_debug_driver=False) rangeArray = [] cf =", "should have received a copy of the GNU General Public", "range is None: return False else: return range < MIN_DISTANCE", "of the GNU General Public License # along with this", "deck \"\"\" import logging import sys import time import cflib.crtp", "that comes closer than 0.2m by setting a velocity in", "range < MIN_DISTANCE if __name__ == '__main__': # Initialize the", "* Crazyradio PA * Flow deck * Multiranger deck \"\"\"", "by the Free Software Foundation; either version 2 # of", "Crazyflie from cflib.crazyflie.syncCrazyflie import SyncCrazyflie from cflib.positioning.motion_commander import MotionCommander from", "Foundation; either version 2 # of the License, or (at", "details. # You should have received a copy of the", "| 0xBC | / __ / / __/ ___/ ___/", "Crazyflie. 
For the example to run the following hardware is", "GNU General Public License # as published by the Free", "import logging import sys import time import cflib.crtp from cflib.crazyflie", "of the GNU General Public License # as published by", "is needed: * Crazyflie 2.0 * Crazyradio PA * Flow", "the Flow and Multi-ranger decks to measure distances in all", "as scf: with MotionCommander(scf) as motion_commander: with Multiranger(scf) as multiranger:", "from cflib.positioning.motion_commander import MotionCommander from cflib.utils.multiranger import Multiranger import matplotlib.pyplot", "cf = Crazyflie(rw_cache='./cache') with SyncCrazyflie(URI, cf=cf) as scf: with MotionCommander(scf)", "of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See", "\\__,_/ /___/\\___/ # # Copyright (C) 2017 Bitcraze AB #", "# This program is free software; you can redistribute it", "but WITHOUT ANY WARRANTY; without even the implied warranty of", "python3 # -*- coding: utf-8 -*- # # || ____", "Public License # as published by the Free Software Foundation;", "and Multi-ranger decks to measure distances in all directions and", "-*- coding: utf-8 -*- # # || ____ _ __", "tries to keep away from anything that comes closer than", "this program; if not, write to the Free Software #", "rangeArray = [] cf = Crazyflie(rw_cache='./cache') with SyncCrazyflie(URI, cf=cf) as", "/_______________ _____ ___ # | 0xBC | / __ /", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #", "drivers) cflib.crtp.init_drivers(enable_debug_driver=False) rangeArray = [] cf = Crazyflie(rw_cache='./cache') with SyncCrazyflie(URI,", "License # as published by the Free Software Foundation; either", "/ /_/ / / /_/ __/ # || || /_____/_/\\__/\\___/_/", "GNU General Public License # along with this program; if", "holding your hand above the Crazyflie. 
For the example to", "None: return False else: return range < MIN_DISTANCE if __name__", "as published by the Free Software Foundation; either version 2", "[] cf = Crazyflie(rw_cache='./cache') with SyncCrazyflie(URI, cf=cf) as scf: with", "logging.basicConfig(level=logging.ERROR) def is_close(range): MIN_DISTANCE = 0.2 # m if range", "ended by either pressing Ctrl-C or by holding your hand", "MIN_DISTANCE = 0.2 # m if range is None: return", "/_/ / / /_/ __/ # || || /_____/_/\\__/\\___/_/ \\__,_/", "/ /_/ /__/ / / /_/ / / /_/ __/", "from the logging framework logging.basicConfig(level=logging.ERROR) def is_close(range): MIN_DISTANCE = 0.2", "can redistribute it and/or # modify it under the terms", "import Multiranger import matplotlib.pyplot as plt from matplotlib.pyplot import figure", "scipts that allows a user to \"push\" the Crazyflie 2.0", "# modify it under the terms of the GNU General", "distances in all directions and tries to keep away from", "a copy of the GNU General Public License # along", "# -*- coding: utf-8 -*- # # || ____ _", "the GNU General Public License # along with this program;", "> 1: URI = sys.argv[1] # Only output errors from", "terms of the GNU General Public License # as published", "hands while it's hovering. This examples uses the Flow and", "import Crazyflie from cflib.crazyflie.syncCrazyflie import SyncCrazyflie from cflib.positioning.motion_commander import MotionCommander", "(C) 2017 Bitcraze AB # # Crazyflie Python Library #", "have received a copy of the GNU General Public License", "Public License # along with this program; if not, write", "PA * Flow deck * Multiranger deck \"\"\" import logging", "/ __ / / __/ ___/ ___/ __ `/_ /", "+------+ / __ )(_) /_______________ _____ ___ # | 0xBC", "in the hope that it will be useful, # but", "# along with this program; if not, write to the", "# # || ____ _ __ # +------+ / __", "published by the Free Software Foundation; either version 2 #", "the Crazyflie. 
For the example to run the following hardware", "Crazyradio PA * Flow deck * Multiranger deck \"\"\" import", "MotionCommander from cflib.utils.multiranger import Multiranger import matplotlib.pyplot as plt from", "# m if range is None: return False else: return", "_ \\ # +------+ / /_/ / / /_/ /__/", "|| ____ _ __ # +------+ / __ )(_) /_______________", "02110-1301, USA. \"\"\" Example scipts that allows a user to", "/_/ /__/ / / /_/ / / /_/ __/ #", "Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. \"\"\"", "using your hands while it's hovering. This examples uses the", "WARRANTY; without even the implied warranty of # MERCHANTABILITY or", "or by holding your hand above the Crazyflie. For the", "# # Copyright (C) 2017 Bitcraze AB # # Crazyflie", "__ )(_) /_______________ _____ ___ # | 0xBC | /", "is distributed in the hope that it will be useful,", "with this program; if not, write to the Free Software", "the Crazyflie 2.0 around using your hands while it's hovering.", "SyncCrazyflie(URI, cf=cf) as scf: with MotionCommander(scf) as motion_commander: with Multiranger(scf)", "not, write to the Free Software # Foundation, Inc., 51", "False else: return range < MIN_DISTANCE if __name__ == '__main__':", "following hardware is needed: * Crazyflie 2.0 * Crazyradio PA", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "# MA 02110-1301, USA. \"\"\" Example scipts that allows a", "without even the implied warranty of # MERCHANTABILITY or FITNESS", "from anything that comes closer than 0.2m by setting a" ]
[ "USER_TYPE_BARBER = 'BARBER' USER_TYPE_CHOICES = ( (USER_TYPE_STAFF, _('Dev')), (USER_TYPE_ADMIN, _('Admin')),", "from django.utils.translation import ugettext_lazy as _ USER_TYPE_STAFF = 'STAFF' USER_TYPE_ADMIN", "= 'STAFF' USER_TYPE_ADMIN = 'ADMIN' USER_TYPE_BARBER = 'BARBER' USER_TYPE_CHOICES =", "import ugettext_lazy as _ USER_TYPE_STAFF = 'STAFF' USER_TYPE_ADMIN = 'ADMIN'", "USER_TYPE_STAFF = 'STAFF' USER_TYPE_ADMIN = 'ADMIN' USER_TYPE_BARBER = 'BARBER' USER_TYPE_CHOICES", "= 'ADMIN' USER_TYPE_BARBER = 'BARBER' USER_TYPE_CHOICES = ( (USER_TYPE_STAFF, _('Dev')),", "'ADMIN' USER_TYPE_BARBER = 'BARBER' USER_TYPE_CHOICES = ( (USER_TYPE_STAFF, _('Dev')), (USER_TYPE_ADMIN,", "= 'BARBER' USER_TYPE_CHOICES = ( (USER_TYPE_STAFF, _('Dev')), (USER_TYPE_ADMIN, _('Admin')), (USER_TYPE_BARBER,", "USER_TYPE_CHOICES = ( (USER_TYPE_STAFF, _('Dev')), (USER_TYPE_ADMIN, _('Admin')), (USER_TYPE_BARBER, _('Barber')), )", "django.utils.translation import ugettext_lazy as _ USER_TYPE_STAFF = 'STAFF' USER_TYPE_ADMIN =", "USER_TYPE_ADMIN = 'ADMIN' USER_TYPE_BARBER = 'BARBER' USER_TYPE_CHOICES = ( (USER_TYPE_STAFF,", "'STAFF' USER_TYPE_ADMIN = 'ADMIN' USER_TYPE_BARBER = 'BARBER' USER_TYPE_CHOICES = (", "'BARBER' USER_TYPE_CHOICES = ( (USER_TYPE_STAFF, _('Dev')), (USER_TYPE_ADMIN, _('Admin')), (USER_TYPE_BARBER, _('Barber')),", "_ USER_TYPE_STAFF = 'STAFF' USER_TYPE_ADMIN = 'ADMIN' USER_TYPE_BARBER = 'BARBER'", "as _ USER_TYPE_STAFF = 'STAFF' USER_TYPE_ADMIN = 'ADMIN' USER_TYPE_BARBER =", "ugettext_lazy as _ USER_TYPE_STAFF = 'STAFF' USER_TYPE_ADMIN = 'ADMIN' USER_TYPE_BARBER" ]
[ "\"\"\" Chrome proxy test page for traffic over https. \"\"\"", "test page for traffic over https. \"\"\" def __init__(self): super(HTML5TestStorySet,", "# found in the LICENSE file. from common.chrome_proxy_shared_page_state import ChromeProxySharedPageState", "__init__(self, url, page_set): super(HTML5TestPage, self).__init__(url=url, page_set=page_set, shared_page_state_class=ChromeProxySharedPageState) class HTML5TestStorySet(story.StorySet): \"\"\"", "for traffic over https. \"\"\" def __init__(self): super(HTML5TestStorySet, self).__init__() urls_list", "= [ 'http://html5test.com/', ] for url in urls_list: self.AddStory(HTML5TestPage(url, self))", "license that can be # found in the LICENSE file.", "import page as page_module from telemetry import story class HTML5TestPage(page_module.Page):", "story class HTML5TestPage(page_module.Page): def __init__(self, url, page_set): super(HTML5TestPage, self).__init__(url=url, page_set=page_set,", "proxy test page for traffic over https. \"\"\" def __init__(self):", "the LICENSE file. from common.chrome_proxy_shared_page_state import ChromeProxySharedPageState from telemetry.page import", "import ChromeProxySharedPageState from telemetry.page import page as page_module from telemetry", "self).__init__() urls_list = [ 'http://html5test.com/', ] for url in urls_list:", "import story class HTML5TestPage(page_module.Page): def __init__(self, url, page_set): super(HTML5TestPage, self).__init__(url=url,", "that can be # found in the LICENSE file. from", "code is governed by a BSD-style license that can be", "a BSD-style license that can be # found in the", "from telemetry.page import page as page_module from telemetry import story", "def __init__(self): super(HTML5TestStorySet, self).__init__() urls_list = [ 'http://html5test.com/', ] for", "class HTML5TestStorySet(story.StorySet): \"\"\" Chrome proxy test page for traffic over", "can be # found in the LICENSE file. 
from common.chrome_proxy_shared_page_state", "this source code is governed by a BSD-style license that", "# Copyright 2016 The Chromium Authors. All rights reserved. #", "traffic over https. \"\"\" def __init__(self): super(HTML5TestStorySet, self).__init__() urls_list =", "page as page_module from telemetry import story class HTML5TestPage(page_module.Page): def", "super(HTML5TestStorySet, self).__init__() urls_list = [ 'http://html5test.com/', ] for url in", "BSD-style license that can be # found in the LICENSE", "telemetry import story class HTML5TestPage(page_module.Page): def __init__(self, url, page_set): super(HTML5TestPage,", "by a BSD-style license that can be # found in", "governed by a BSD-style license that can be # found", "# Use of this source code is governed by a", "is governed by a BSD-style license that can be #", "file. from common.chrome_proxy_shared_page_state import ChromeProxySharedPageState from telemetry.page import page as", "telemetry.page import page as page_module from telemetry import story class", "shared_page_state_class=ChromeProxySharedPageState) class HTML5TestStorySet(story.StorySet): \"\"\" Chrome proxy test page for traffic", "Use of this source code is governed by a BSD-style", "page for traffic over https. \"\"\" def __init__(self): super(HTML5TestStorySet, self).__init__()", "class HTML5TestPage(page_module.Page): def __init__(self, url, page_set): super(HTML5TestPage, self).__init__(url=url, page_set=page_set, shared_page_state_class=ChromeProxySharedPageState)", "LICENSE file. from common.chrome_proxy_shared_page_state import ChromeProxySharedPageState from telemetry.page import page", "ChromeProxySharedPageState from telemetry.page import page as page_module from telemetry import", "urls_list = [ 'http://html5test.com/', ] for url in urls_list: self.AddStory(HTML5TestPage(url,", "of this source code is governed by a BSD-style license", "All rights reserved. # Use of this source code is", "rights reserved. 
# Use of this source code is governed", "from common.chrome_proxy_shared_page_state import ChromeProxySharedPageState from telemetry.page import page as page_module", "be # found in the LICENSE file. from common.chrome_proxy_shared_page_state import", "found in the LICENSE file. from common.chrome_proxy_shared_page_state import ChromeProxySharedPageState from", "as page_module from telemetry import story class HTML5TestPage(page_module.Page): def __init__(self,", "2016 The Chromium Authors. All rights reserved. # Use of", "page_module from telemetry import story class HTML5TestPage(page_module.Page): def __init__(self, url,", "page_set): super(HTML5TestPage, self).__init__(url=url, page_set=page_set, shared_page_state_class=ChromeProxySharedPageState) class HTML5TestStorySet(story.StorySet): \"\"\" Chrome proxy", "page_set=page_set, shared_page_state_class=ChromeProxySharedPageState) class HTML5TestStorySet(story.StorySet): \"\"\" Chrome proxy test page for", "HTML5TestStorySet(story.StorySet): \"\"\" Chrome proxy test page for traffic over https.", "The Chromium Authors. All rights reserved. # Use of this", "__init__(self): super(HTML5TestStorySet, self).__init__() urls_list = [ 'http://html5test.com/', ] for url", "source code is governed by a BSD-style license that can", "self).__init__(url=url, page_set=page_set, shared_page_state_class=ChromeProxySharedPageState) class HTML5TestStorySet(story.StorySet): \"\"\" Chrome proxy test page", "Authors. All rights reserved. # Use of this source code", "HTML5TestPage(page_module.Page): def __init__(self, url, page_set): super(HTML5TestPage, self).__init__(url=url, page_set=page_set, shared_page_state_class=ChromeProxySharedPageState) class", "Chromium Authors. All rights reserved. # Use of this source", "in the LICENSE file. from common.chrome_proxy_shared_page_state import ChromeProxySharedPageState from telemetry.page", "Chrome proxy test page for traffic over https. \"\"\" def", "over https. 
\"\"\" def __init__(self): super(HTML5TestStorySet, self).__init__() urls_list = [", "\"\"\" def __init__(self): super(HTML5TestStorySet, self).__init__() urls_list = [ 'http://html5test.com/', ]", "reserved. # Use of this source code is governed by", "super(HTML5TestPage, self).__init__(url=url, page_set=page_set, shared_page_state_class=ChromeProxySharedPageState) class HTML5TestStorySet(story.StorySet): \"\"\" Chrome proxy test", "from telemetry import story class HTML5TestPage(page_module.Page): def __init__(self, url, page_set):", "def __init__(self, url, page_set): super(HTML5TestPage, self).__init__(url=url, page_set=page_set, shared_page_state_class=ChromeProxySharedPageState) class HTML5TestStorySet(story.StorySet):", "url, page_set): super(HTML5TestPage, self).__init__(url=url, page_set=page_set, shared_page_state_class=ChromeProxySharedPageState) class HTML5TestStorySet(story.StorySet): \"\"\" Chrome", "https. \"\"\" def __init__(self): super(HTML5TestStorySet, self).__init__() urls_list = [ 'http://html5test.com/',", "Copyright 2016 The Chromium Authors. All rights reserved. # Use", "common.chrome_proxy_shared_page_state import ChromeProxySharedPageState from telemetry.page import page as page_module from" ]
[ "version, errors during fetching & executing \"\"\" import sqlite3 from", "is not None: user_credentials = self.fetch(self.command.get_user_by_user_id.format(user)) elif id is not", "integer, # monthly_visitations: integer # ) # database.command.create_websites_table = '''", "= 'SELECT user_id, password FROM users WHERE user_id = \\'{}\\';'", "# database.execute(database.command.add_user.format('jh0003', '123123a')) # database.execute(database.command.add_user.format('kb0004', '123123a')) # database.execute(database.command.add_user.format('op0001', '123123a')) #", ") # ''' # database.command.add_website = 'INSERT INTO websites (url,", "# create a simple database with websites table that includes", "class DataBase: \"\"\" create a database connection to the SQLite", "# database.execute(database.command.drop_table.format('users')) # database.execute(database.command.create_users_table) # database.execute(database.command.add_user.format('cs0008', '123123a')) # database.execute(database.command.add_user.format('af0006', '123123a'))", ") # database.command.create_websites_table = ''' # CREATE TABLE IF NOT", "\"__main__\" == __name__: import os log_file = os.path.dirname(os.path.abspath(__file__)) + '\\\\log.txt'", "NOT EXISTS websites ( # id INTEGER PRIMARY KEY AUTOINCREMENT,", "create as create_logger class Commands: create_users_table = ''' CREATE TABLE", "= database.get_user_credentials(id='14') # database.connection.commit() # database.connection.close() # print(user_credentials) # create", "text ); ''' add_user = 'INSERT INTO users (user_id, password)", "is not None: # return users[id][1:] # return users[-1][1:] def", "e: self.log.exception(e) def fetch(self, command=None, *args, **kw): if command is", "database = DataBaseExtention(db_file, log) # @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ # database.execute(database.command.drop_table.format('users')) # database.execute(database.command.create_users_table)", "i: s.append(str(a)) f.write(','.join(s) + '\\n') 
class Commands:
    """SQL statement templates for the ``users`` table.

    Each template carries ``{}`` placeholders that callers substitute with
    ``str.format`` before handing the statement to a cursor.
    """

    create_users_table = '''
        CREATE TABLE IF NOT EXISTS users (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            user_id text,
            password text
        );
    '''
    add_user = "INSERT INTO users (user_id, password) VALUES ('{}', '{}');"
    get_users = 'SELECT user_id, password FROM users;'
    get_user_by_user_id = "SELECT user_id, password FROM users WHERE user_id = '{}';"
    get_user_by_id = "SELECT user_id, password FROM users WHERE id = '{}';"
    get_last_user = 'SELECT user_id, password FROM users ORDER BY ID DESC LIMIT 1'
    drop_table = 'DROP TABLE IF EXISTS {};'
class DataBase:
    """Thin, logging wrapper around a SQLite connection.

    Creates a database connection to the SQLite database specified by
    ``db_file`` and logs the connection status plus any errors raised
    while executing or fetching.

    :param db_file: path to the database file (or ``':memory:'``)
    :param log: logger exposing ``info``/``exception`` methods
    :param commands: optional namespace of SQL command templates; defaults
        to the module-level ``Commands`` class
    """

    def __init__(self, db_file, log, commands=None):
        """Open the connection, create a cursor and log the outcome."""
        try:
            self.log = log
            self.log.info('connecting to database')
            self.connection = sqlite3.connect(db_file)
            self.cursor = self.connection.cursor()
            self.log.info('connection success')
            # sqlite3.version (the module version) is deprecated and removed
            # in Python 3.14; log the SQLite library version instead.
            self.log.info('sqlite3 version {}'.format(sqlite3.sqlite_version))
            if commands is None:
                commands = Commands
            self.command = commands
        except Exception as e:
            self.log.exception(e)
            # Re-raise as-is: the original `raise Exception(e)` discarded the
            # exception type and traceback, which hid the real failure cause.
            raise

    def execute(self, command, *args, **kwargs):
        """Execute one SQL statement; return the cursor, or None on error.

        NOTE(review): callers build SQL with str.format, which is vulnerable
        to SQL injection for untrusted values; prefer parameterized queries
        (``cursor.execute(sql, params)``) when inputs are external.
        """
        try:
            return self.cursor.execute(command)
        except Exception as e:
            # Best-effort by design: log and swallow so the caller gets None.
            self.log.exception(e)

    def fetch(self, command=None, *args, **kw):
        """Optionally execute ``command``, then return all rows (None on error)."""
        if command is not None:
            self.execute(command)
        try:
            return self.cursor.fetchall()
        except Exception as e:
            self.log.exception(e)

    def export_from_table_to_file(self, table, file_name, titles, permission='w'):
        """Dump every row of ``table`` to ``file_name`` as comma-separated text.

        :param table: table name (interpolated into the SELECT — trusted input only)
        :param file_name: destination file path
        :param titles: iterable of column headers written as the first line
        :param permission: open mode for the file ('w' overwrites, 'a' appends)
        """
        try:
            self.cursor.execute("select * from {}".format(table))
            rows = self.cursor.fetchall()
            with open(file_name, permission) as f:
                f.write(','.join(titles) + '\n')
                for row in rows:
                    f.write(','.join(str(value) for value in row) + '\n')
        except Exception as e:
            self.log.exception(e)

    def fetch_log(self, *args, **kw):
        """Like :meth:`fetch`, but also log each returned row at INFO level."""
        rows = self.fetch(*args, **kw)
        if rows is not None:
            for r in rows:
                self.log.info(r)
        return rows
class DataBaseExtention(DataBase):
    """DataBase specialized with user-credential lookups."""

    def get_user_credentials(self, user=None, id=None):
        """Return the ``(user_id, password)`` row for a user, or None.

        Lookup order: by ``user`` (the user_id column) when given, else by
        primary-key ``id``, else the most recently inserted user.

        :param user: user_id value to look up
        :param id: primary-key value to look up (name kept for backward
            compatibility even though it shadows the builtin)
        :return: first matching row tuple, or None when nothing matched
        """
        if user is not None:
            user_credentials = self.fetch(self.command.get_user_by_user_id.format(user))
        elif id is not None:
            user_credentials = self.fetch(self.command.get_user_by_id.format(id))
        else:
            user_credentials = self.fetch(self.command.get_last_user)
        # fetch() returns None when the query failed; the original
        # `len(user_credentials) > 0` raised TypeError in that case.
        # Truthiness covers both None and an empty result set.
        if user_credentials:
            return user_credentials[0]
if "__main__" == __name__:
    import os

    # Build the log/db paths next to this script. os.path.join keeps the
    # separators portable; the original hard-coded Windows '\\' separators,
    # which produced broken paths on POSIX systems.
    base_dir = os.path.dirname(os.path.abspath(__file__))
    log_file = os.path.join(base_dir, 'log.txt')
    db_file = os.path.join(base_dir, 'db.db')
    log = create_logger(log_file=log_file)
    database = DataBaseExtention(db_file, log)
    # One-off schema/seed scratch statements (drop/create tables, sample
    # user and website inserts, ad-hoc credential queries) were removed as
    # dead commented-out code.
    database.export_from_table_to_file(
        table='websites',
        file_name='exported.csv',
        titles=('id', 'url', 'popularity_score', 'monthly_visitations'),
    )
    # database.connection.commit()
    # database.connection.close()
(\\'{}\\', \\'{}\\', \\'{}\\');' #", "popularity, visitations = database.fetch(database.command.get_site.format('https://www.python.org'))[0] # # print(url, popularity, visitations) database.export_from_table_to_file(", "import os log_file = os.path.dirname(os.path.abspath(__file__)) + '\\\\log.txt' db_file = os.path.dirname(os.path.abspath(__file__))", "INTEGER PRIMARY KEY AUTOINCREMENT, # url TEXT, # popularity_score INTEGER,", "password) VALUES (\\'{}\\', \\'{}\\');' get_users = 'SELECT user_id, password FROM", "Exception as e: self.log.exception(e) def export_from_table_to_file(self, table, file_name, titles, permission='w'):", "WHERE user_id = \\'{}\\';' get_user_by_id = 'SELECT user_id, password FROM", "a simple database with websites table that includes ( #", "table='websites', file_name='exported.csv', titles=('id', 'url', 'popularity_score', 'monthly_visitations') ) # database.connection.commit() database.connection.close()", "user_id = \\'{}\\';' get_user_by_id = 'SELECT user_id, password FROM users", "= self.fetch(self.command.get_user_by_id.format(id)) else: user_credentials = self.fetch(self.command.get_last_user) if len(user_credentials) > 0:", "# monthly_visitations INTEGER # ) # ''' # database.command.add_website =", "INTEGER, # monthly_visitations INTEGER # ) # ''' # database.command.add_website", "includes ( # url: varchar(1024), # popularity_score: integer, # monthly_visitations:", "export_from_table_to_file(self, table, file_name, titles, permission='w'): try: self.cursor.execute(\"select * from {}\".format(table))", "(\\'{}\\', \\'{}\\');' get_users = 'SELECT user_id, password FROM users;' get_user_by_user_id", "user is not None: # for i in users: #", "command is not None: self.execute(command) try: return self.cursor.fetchall() except Exception", "that includes ( # url: varchar(1024), # popularity_score: integer, #", "the SQLite database specified by db_file :param db_file: database file", "'123123a')) # 
database.execute(database.command.add_user.format('op0001', '123123a')) # database.execute(database.command.add_user.format('gv0001', '123123a')) # database.execute(database.command.add_user.format('pm0001', '123123a'))", "from {}\".format(table)) table_list = self.cursor.fetchall() with open(file_name, permission) as f:", "= 'SELECT url, popularity_score, monthly_visitations FROM websites WHERE url =", "password logger showing connection logs, DB version, errors during fetching", "# if user is not None: # for i in", "database.execute(database.command.add_user.format('ps0001', '123123a')) # database.execute(database.command.add_user.format('qa0000', '123123a')) # user_credentials = database.get_user_credentials(id='14') #", "# database.execute(database.command.add_user.format('gv0001', '123123a')) # database.execute(database.command.add_user.format('pm0001', '123123a')) # database.execute(database.command.add_user.format('ps0001', '123123a')) #", "get_user_credentials(self, user=None, id=None): # users = self.fetch(self.command.get_users) # if user", "if id is not None: # return users[id][1:] # return", "database with: user_id & password logger showing connection logs, DB", "**kw): if command is not None: self.execute(command) try: return self.cursor.fetchall()", "user=None, id=None): # users = self.fetch(self.command.get_users) # if user is", "os.path.dirname(os.path.abspath(__file__)) + '\\\\log.txt' db_file = os.path.dirname(os.path.abspath(__file__)) + '\\\\db.db' log =", "database.execute(database.command.add_user.format('qa0000', '123123a')) # user_credentials = database.get_user_credentials(id='14') # database.connection.commit() # database.connection.close()", "id INTEGER PRIMARY KEY AUTOINCREMENT, # url TEXT, # popularity_score", "# database.execute(database.command.add_user.format('cs0008', '123123a')) # database.execute(database.command.add_user.format('af0006', '123123a')) # database.execute(database.command.add_user.format('jh0003', '123123a')) #", "= 
DataBaseExtention(db_file, log) # @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ # database.execute(database.command.drop_table.format('users')) # database.execute(database.command.create_users_table) #", "BY ID DESC LIMIT 1' drop_table = 'DROP TABLE IF", "database.execute(database.command.drop_table.format('users')) # database.execute(database.command.create_users_table) # database.execute(database.command.add_user.format('cs0008', '123123a')) # database.execute(database.command.add_user.format('af0006', '123123a')) #", "PRIMARY KEY AUTOINCREMENT, user_id text, password text ); ''' add_user", "is None: commands = Commands self.command = commands except Exception", "'SELECT user_id, password FROM users ORDER BY ID DESC LIMIT", "**kw) if rows is not None: for r in rows:", "url TEXT, # popularity_score INTEGER, # monthly_visitations INTEGER # )", "# database.execute(database.command.add_user.format('af0006', '123123a')) # database.execute(database.command.add_user.format('jh0003', '123123a')) # database.execute(database.command.add_user.format('kb0004', '123123a')) #", "= commands except Exception as e: self.log.exception(e) raise Exception(e) def", "( id INTEGER PRIMARY KEY AUTOINCREMENT, user_id text, password text", "TABLE IF NOT EXISTS websites ( # id INTEGER PRIMARY", "# database.execute(database.command.add_user.format('pm0001', '123123a')) # database.execute(database.command.add_user.format('ps0001', '123123a')) # database.execute(database.command.add_user.format('qa0000', '123123a')) #", "this example we want to create a user credentials database", "db_file, log, commands=None): \"\"\" database connection \"\"\" try: self.log =", "# database.execute(database.command.create_websites_table) # database.execute(database.command.add_website.format('https://www.google.com', 5, 4000000000)) # database.execute(database.command.add_website.format('https://www.ynet.com', 3, 5000000))", "ORDER BY ID DESC LIMIT 1' drop_table = 'DROP TABLE", "= 'INSERT INTO websites (url, 
popularity_score, monthly_visitations) VALUES (\\'{}\\', \\'{}\\',", "**kwargs): try: return self.cursor.execute(command) except Exception as e: self.log.exception(e) def", "'DROP TABLE IF EXISTS {};' class DataBase: \"\"\" create a", "= log self.log.info('connecting to database') self.connection = sqlite3.connect(db_file) self.cursor =", "if len(user_credentials) > 0: return user_credentials[0] if \"__main__\" == __name__:", "1' drop_table = 'DROP TABLE IF EXISTS {};' class DataBase:", "database.execute(database.command.add_website.format('https://www.youtube.com', 6, 1300000000)) # database.execute(database.command.add_website.format('https://www.python.org', 5, 1000000)) # database.command.get_site =", "6, 1300000000)) # database.execute(database.command.add_website.format('https://www.python.org', 5, 1000000)) # database.command.get_site = 'SELECT", "with websites table that includes ( # url: varchar(1024), #", "None: commands = Commands self.command = commands except Exception as", "\\'{}\\';' get_user_by_id = 'SELECT user_id, password FROM users WHERE id", "command, *args, **kwargs): try: return self.cursor.execute(command) except Exception as e:", "websites (url, popularity_score, monthly_visitations) VALUES (\\'{}\\', \\'{}\\', \\'{}\\');' # database.execute(database.command.create_websites_table)", "PRIMARY KEY AUTOINCREMENT, # url TEXT, # popularity_score INTEGER, #", "\\'{}\\';' # url, popularity, visitations = database.fetch(database.command.get_site.format('https://www.python.org'))[0] # # print(url,", "# print(url, popularity, visitations) database.export_from_table_to_file( table='websites', file_name='exported.csv', titles=('id', 'url', 'popularity_score',", "rows is not None: for r in rows: self.log.info(r) return", "\\'{}\\');' get_users = 'SELECT user_id, password FROM users;' get_user_by_user_id =", "we want to create a user credentials database with: user_id", "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ # 
database.execute(database.command.drop_table.format('users')) # database.execute(database.command.create_users_table) # database.execute(database.command.add_user.format('cs0008', '123123a')) # database.execute(database.command.add_user.format('af0006',", "*args, **kw): rows = self.fetch(*args, **kw) if rows is not", "commands=None): \"\"\" database connection \"\"\" try: self.log = log self.log.info('connecting", "# database.command.create_websites_table = ''' # CREATE TABLE IF NOT EXISTS", "database connection \"\"\" try: self.log = log self.log.info('connecting to database')", "def __init__(self, db_file, log, commands=None): \"\"\" database connection \"\"\" try:", "e: self.log.exception(e) def export_from_table_to_file(self, table, file_name, titles, permission='w'): try: self.cursor.execute(\"select", "id INTEGER PRIMARY KEY AUTOINCREMENT, user_id text, password text );", "password FROM users;' get_user_by_user_id = 'SELECT user_id, password FROM users", "drop_table = 'DROP TABLE IF EXISTS {};' class DataBase: \"\"\"", "# database.execute(database.command.add_user.format('qa0000', '123123a')) # user_credentials = database.get_user_credentials(id='14') # database.connection.commit() #", "EXISTS websites ( # id INTEGER PRIMARY KEY AUTOINCREMENT, #", "= self.fetch(self.command.get_user_by_user_id.format(user)) elif id is not None: user_credentials = self.fetch(self.command.get_user_by_id.format(id))", "# url, popularity, visitations = database.fetch(database.command.get_site.format('https://www.python.org'))[0] # # print(url, popularity,", "logs, DB version, errors during fetching & executing \"\"\" import", "(user_id, password) VALUES (\\'{}\\', \\'{}\\');' get_users = 'SELECT user_id, password", "users: # if user in i: # return i #", "= sqlite3.connect(db_file) self.cursor = self.connection.cursor() self.log.info('connection success') self.log.info('sqlite3 version {}'.format(sqlite3.version))", "password text ); ''' add_user = 'INSERT INTO users (user_id,", 
"__name__: import os log_file = os.path.dirname(os.path.abspath(__file__)) + '\\\\log.txt' db_file =", "raise Exception(e) def execute(self, command, *args, **kwargs): try: return self.cursor.execute(command)", "'SELECT user_id, password FROM users WHERE id = \\'{}\\';''' get_last_user", "i in users: # if user in i: # return", "database.command.add_website = 'INSERT INTO websites (url, popularity_score, monthly_visitations) VALUES (\\'{}\\',", "EXISTS {};' class DataBase: \"\"\" create a database connection to", "database.execute(database.command.create_websites_table) # database.execute(database.command.add_website.format('https://www.google.com', 5, 4000000000)) # database.execute(database.command.add_website.format('https://www.ynet.com', 3, 5000000)) #", "return users[-1][1:] def get_user_credentials(self, user=None, id=None): if user is not", ":param db_file: database file \"\"\" def __init__(self, db_file, log, commands=None):", "get_user_by_user_id = 'SELECT user_id, password FROM users WHERE user_id =", "self.log.exception(e) def export_from_table_to_file(self, table, file_name, titles, permission='w'): try: self.cursor.execute(\"select *", "db_file = os.path.dirname(os.path.abspath(__file__)) + '\\\\db.db' log = create_logger(log_file=log_file) database =", "\"\"\" in this example we want to create a user", "create a user credentials database with: user_id & password logger", "Exception as e: self.log.exception(e) def fetch_log(self, *args, **kw): rows =", "self.cursor.execute(\"select * from {}\".format(table)) table_list = self.cursor.fetchall() with open(file_name, permission)", "command=None, *args, **kw): if command is not None: self.execute(command) try:", "except Exception as e: self.log.exception(e) def export_from_table_to_file(self, table, file_name, titles,", "\"\"\" database connection \"\"\" try: self.log = log self.log.info('connecting to", "self.execute(command) try: return self.cursor.fetchall() except Exception as e: self.log.exception(e) 
def", "self.cursor.fetchall() except Exception as e: self.log.exception(e) def export_from_table_to_file(self, table, file_name,", "table, file_name, titles, permission='w'): try: self.cursor.execute(\"select * from {}\".format(table)) table_list", "DB version, errors during fetching & executing \"\"\" import sqlite3", "not None: for r in rows: self.log.info(r) return rows class", "websites table that includes ( # url: varchar(1024), # popularity_score:", "'123123a')) # database.execute(database.command.add_user.format('pm0001', '123123a')) # database.execute(database.command.add_user.format('ps0001', '123123a')) # database.execute(database.command.add_user.format('qa0000', '123123a'))", "as create_logger class Commands: create_users_table = ''' CREATE TABLE IF", "CREATE TABLE IF NOT EXISTS users ( id INTEGER PRIMARY", "create a database connection to the SQLite database specified by", "to the SQLite database specified by db_file :param db_file: database", "monthly_visitations: integer # ) # database.command.create_websites_table = ''' # CREATE", "credentials database with: user_id & password logger showing connection logs,", "with: user_id & password logger showing connection logs, DB version,", "monthly_visitations INTEGER # ) # ''' # database.command.add_website = 'INSERT", "self.cursor.fetchall() with open(file_name, permission) as f: f.write(','.join(titles) + '\\n') for", "if user in i: # return i # if id", "# url: varchar(1024), # popularity_score: integer, # monthly_visitations: integer #", "self.log.info('sqlite3 version {}'.format(sqlite3.version)) if commands is None: commands = Commands", "database.command.create_websites_table = ''' # CREATE TABLE IF NOT EXISTS websites", "text, password text ); ''' add_user = 'INSERT INTO users", "FROM users ORDER BY ID DESC LIMIT 1' drop_table =", "# popularity_score INTEGER, # monthly_visitations INTEGER # ) # '''", "import sqlite3 from lessons.sqlite_example.log import create as create_logger class Commands:" ]
[ "<gh_stars>0 from django.urls import re_path from projectx.consumers import UserWebSocketConsumer from", "from django.urls import re_path from projectx.consumers import UserWebSocketConsumer from .consumers", "import UserWebSocketConsumer from .consumers import UserWebSocketConsumer websocket_urlpatterns = [ re_path(r\"^ws/$\",", "import re_path from projectx.consumers import UserWebSocketConsumer from .consumers import UserWebSocketConsumer", "from .consumers import UserWebSocketConsumer websocket_urlpatterns = [ re_path(r\"^ws/$\", UserWebSocketConsumer.as_asgi()), ]", "django.urls import re_path from projectx.consumers import UserWebSocketConsumer from .consumers import", "UserWebSocketConsumer from .consumers import UserWebSocketConsumer websocket_urlpatterns = [ re_path(r\"^ws/$\", UserWebSocketConsumer.as_asgi()),", "projectx.consumers import UserWebSocketConsumer from .consumers import UserWebSocketConsumer websocket_urlpatterns = [", "re_path from projectx.consumers import UserWebSocketConsumer from .consumers import UserWebSocketConsumer websocket_urlpatterns", "from projectx.consumers import UserWebSocketConsumer from .consumers import UserWebSocketConsumer websocket_urlpatterns =" ]
[ "apphook_pool from .conf import settings class AldrynSearchApphook(CMSApp): name = _(\"aldryn", "_(\"aldryn search\") def get_urls(self, *args, **kwargs): return ['aldryn_search.urls'] if settings.ALDRYN_SEARCH_REGISTER_APPHOOK:", "from cms.app_base import CMSApp from cms.apphook_pool import apphook_pool from .conf", "import settings class AldrynSearchApphook(CMSApp): name = _(\"aldryn search\") def get_urls(self,", "from cms.apphook_pool import apphook_pool from .conf import settings class AldrynSearchApphook(CMSApp):", "name = _(\"aldryn search\") def get_urls(self, *args, **kwargs): return ['aldryn_search.urls']", "import apphook_pool from .conf import settings class AldrynSearchApphook(CMSApp): name =", "search\") def get_urls(self, *args, **kwargs): return ['aldryn_search.urls'] if settings.ALDRYN_SEARCH_REGISTER_APPHOOK: apphook_pool.register(AldrynSearchApphook)", "as _ from cms.app_base import CMSApp from cms.apphook_pool import apphook_pool", "CMSApp from cms.apphook_pool import apphook_pool from .conf import settings class", "import ugettext_lazy as _ from cms.app_base import CMSApp from cms.apphook_pool", "AldrynSearchApphook(CMSApp): name = _(\"aldryn search\") def get_urls(self, *args, **kwargs): return", "class AldrynSearchApphook(CMSApp): name = _(\"aldryn search\") def get_urls(self, *args, **kwargs):", "= _(\"aldryn search\") def get_urls(self, *args, **kwargs): return ['aldryn_search.urls'] if", "cms.apphook_pool import apphook_pool from .conf import settings class AldrynSearchApphook(CMSApp): name", "ugettext_lazy as _ from cms.app_base import CMSApp from cms.apphook_pool import", "django.utils.translation import ugettext_lazy as _ from cms.app_base import CMSApp from", ".conf import settings class AldrynSearchApphook(CMSApp): name = _(\"aldryn search\") def", "import CMSApp from cms.apphook_pool import apphook_pool from .conf import settings", "cms.app_base import CMSApp from cms.apphook_pool import apphook_pool from .conf import", "settings 
class AldrynSearchApphook(CMSApp): name = _(\"aldryn search\") def get_urls(self, *args,", "from .conf import settings class AldrynSearchApphook(CMSApp): name = _(\"aldryn search\")", "<reponame>lab360-ch/aldryn-search<filename>aldryn_search/cms_apps.py<gh_stars>10-100 from django.utils.translation import ugettext_lazy as _ from cms.app_base import", "from django.utils.translation import ugettext_lazy as _ from cms.app_base import CMSApp", "_ from cms.app_base import CMSApp from cms.apphook_pool import apphook_pool from" ]
[ "ws.append(df.columns.tolist()) for row in df.values: ws.append(list(row)) row_length = 1 +", "pd from openpyxl import Workbook from openpyxl.chart import BarChart, Reference", "= Reference(ws, min_col=2, max_col=2, min_row=1, max_row=row_length) categories = Reference(ws, min_col=1,", "11 chart.shape = 4 chart.title = '都道府県別の人口' chart.x_axis.title = '都道府県'", "import BarChart, Reference wb = Workbook() ws = wb.active df", "= 1 + len(df.values) values = Reference(ws, min_col=2, max_col=2, min_row=1,", "chart.type = 'bar' chart.style = 11 chart.shape = 4 chart.title", "4 chart.title = '都道府県別の人口' chart.x_axis.title = '都道府県' chart.y_axis.title = '人口'", "= wb.active df = pd.read_csv('population.csv') ws.append(df.columns.tolist()) for row in df.values:", "1 + len(df.values) values = Reference(ws, min_col=2, max_col=2, min_row=1, max_row=row_length)", "'都道府県' chart.y_axis.title = '人口' chart.add_data(values, titles_from_data=True) chart.set_categories(categories) ws.add_chart(chart, 'A9') wb.save('population_horizontal.xlsx')", "'都道府県別の人口' chart.x_axis.title = '都道府県' chart.y_axis.title = '人口' chart.add_data(values, titles_from_data=True) chart.set_categories(categories)", "wb.active df = pd.read_csv('population.csv') ws.append(df.columns.tolist()) for row in df.values: ws.append(list(row))", "= pd.read_csv('population.csv') ws.append(df.columns.tolist()) for row in df.values: ws.append(list(row)) row_length =", "pd.read_csv('population.csv') ws.append(df.columns.tolist()) for row in df.values: ws.append(list(row)) row_length = 1", "min_col=1, min_row=2, max_row=row_length) chart = BarChart() chart.type = 'bar' chart.style", "min_row=2, max_row=row_length) chart = BarChart() chart.type = 'bar' chart.style =", "max_row=row_length) chart = BarChart() chart.type = 'bar' chart.style = 11", "from openpyxl import Workbook from openpyxl.chart import BarChart, Reference wb", "chart.style = 11 chart.shape = 4 chart.title = '都道府県別の人口' chart.x_axis.title", "chart.shape = 4 
chart.title = '都道府県別の人口' chart.x_axis.title = '都道府県' chart.y_axis.title", "for row in df.values: ws.append(list(row)) row_length = 1 + len(df.values)", "chart.title = '都道府県別の人口' chart.x_axis.title = '都道府県' chart.y_axis.title = '人口' chart.add_data(values,", "len(df.values) values = Reference(ws, min_col=2, max_col=2, min_row=1, max_row=row_length) categories =", "= '都道府県別の人口' chart.x_axis.title = '都道府県' chart.y_axis.title = '人口' chart.add_data(values, titles_from_data=True)", "<filename>BizPy/openpyxl/20200513/horizontal_chart.py import pandas as pd from openpyxl import Workbook from", "BarChart, Reference wb = Workbook() ws = wb.active df =", "chart.x_axis.title = '都道府県' chart.y_axis.title = '人口' chart.add_data(values, titles_from_data=True) chart.set_categories(categories) ws.add_chart(chart,", "ws = wb.active df = pd.read_csv('population.csv') ws.append(df.columns.tolist()) for row in", "BarChart() chart.type = 'bar' chart.style = 11 chart.shape = 4", "Reference wb = Workbook() ws = wb.active df = pd.read_csv('population.csv')", "max_col=2, min_row=1, max_row=row_length) categories = Reference(ws, min_col=1, min_row=2, max_row=row_length) chart", "chart = BarChart() chart.type = 'bar' chart.style = 11 chart.shape", "= BarChart() chart.type = 'bar' chart.style = 11 chart.shape =", "Workbook() ws = wb.active df = pd.read_csv('population.csv') ws.append(df.columns.tolist()) for row", "pandas as pd from openpyxl import Workbook from openpyxl.chart import", "= Workbook() ws = wb.active df = pd.read_csv('population.csv') ws.append(df.columns.tolist()) for", "df.values: ws.append(list(row)) row_length = 1 + len(df.values) values = Reference(ws,", "as pd from openpyxl import Workbook from openpyxl.chart import BarChart,", "max_row=row_length) categories = Reference(ws, min_col=1, min_row=2, max_row=row_length) chart = BarChart()", "import Workbook from openpyxl.chart import BarChart, Reference wb = Workbook()", "values = Reference(ws, min_col=2, max_col=2, min_row=1, 
max_row=row_length) categories = Reference(ws,", "in df.values: ws.append(list(row)) row_length = 1 + len(df.values) values =", "min_row=1, max_row=row_length) categories = Reference(ws, min_col=1, min_row=2, max_row=row_length) chart =", "wb = Workbook() ws = wb.active df = pd.read_csv('population.csv') ws.append(df.columns.tolist())", "= 'bar' chart.style = 11 chart.shape = 4 chart.title =", "Workbook from openpyxl.chart import BarChart, Reference wb = Workbook() ws", "= 11 chart.shape = 4 chart.title = '都道府県別の人口' chart.x_axis.title =", "df = pd.read_csv('population.csv') ws.append(df.columns.tolist()) for row in df.values: ws.append(list(row)) row_length", "+ len(df.values) values = Reference(ws, min_col=2, max_col=2, min_row=1, max_row=row_length) categories", "min_col=2, max_col=2, min_row=1, max_row=row_length) categories = Reference(ws, min_col=1, min_row=2, max_row=row_length)", "openpyxl.chart import BarChart, Reference wb = Workbook() ws = wb.active", "ws.append(list(row)) row_length = 1 + len(df.values) values = Reference(ws, min_col=2,", "= Reference(ws, min_col=1, min_row=2, max_row=row_length) chart = BarChart() chart.type =", "'bar' chart.style = 11 chart.shape = 4 chart.title = '都道府県別の人口'", "= 4 chart.title = '都道府県別の人口' chart.x_axis.title = '都道府県' chart.y_axis.title =", "Reference(ws, min_col=2, max_col=2, min_row=1, max_row=row_length) categories = Reference(ws, min_col=1, min_row=2,", "categories = Reference(ws, min_col=1, min_row=2, max_row=row_length) chart = BarChart() chart.type", "openpyxl import Workbook from openpyxl.chart import BarChart, Reference wb =", "from openpyxl.chart import BarChart, Reference wb = Workbook() ws =", "row_length = 1 + len(df.values) values = Reference(ws, min_col=2, max_col=2,", "row in df.values: ws.append(list(row)) row_length = 1 + len(df.values) values", "import pandas as pd from openpyxl import Workbook from openpyxl.chart", "Reference(ws, min_col=1, min_row=2, max_row=row_length) chart = BarChart() chart.type 
= 'bar'", "= '都道府県' chart.y_axis.title = '人口' chart.add_data(values, titles_from_data=True) chart.set_categories(categories) ws.add_chart(chart, 'A9')" ]
[ "'id': instance.job_id.hex, }, 'name': instance.name, 'step': instance.step, 'dateCreated': instance.date_created, }", "import LogSource @register(LogSource) class LogSourceSerializer(Serializer): def serialize(self, instance, attrs): return", "register from changes.models.log import LogSource @register(LogSource) class LogSourceSerializer(Serializer): def serialize(self,", "{ 'id': instance.id.hex, 'job': { 'id': instance.job_id.hex, }, 'name': instance.name,", "<filename>changes/api/serializer/models/logsource.py<gh_stars>1-10 from changes.api.serializer import Serializer, register from changes.models.log import LogSource", "class LogSourceSerializer(Serializer): def serialize(self, instance, attrs): return { 'id': instance.id.hex,", "changes.api.serializer import Serializer, register from changes.models.log import LogSource @register(LogSource) class", "Serializer, register from changes.models.log import LogSource @register(LogSource) class LogSourceSerializer(Serializer): def", "return { 'id': instance.id.hex, 'job': { 'id': instance.job_id.hex, }, 'name':", "import Serializer, register from changes.models.log import LogSource @register(LogSource) class LogSourceSerializer(Serializer):", "@register(LogSource) class LogSourceSerializer(Serializer): def serialize(self, instance, attrs): return { 'id':", "{ 'id': instance.job_id.hex, }, 'name': instance.name, 'step': instance.step, 'dateCreated': instance.date_created,", "from changes.api.serializer import Serializer, register from changes.models.log import LogSource @register(LogSource)", "'job': { 'id': instance.job_id.hex, }, 'name': instance.name, 'step': instance.step, 'dateCreated':", "def serialize(self, instance, attrs): return { 'id': instance.id.hex, 'job': {", "serialize(self, instance, attrs): return { 'id': instance.id.hex, 'job': { 'id':", "LogSource @register(LogSource) class LogSourceSerializer(Serializer): def serialize(self, instance, attrs): return {", "attrs): return { 'id': instance.id.hex, 
'job': { 'id': instance.job_id.hex, },", "from changes.models.log import LogSource @register(LogSource) class LogSourceSerializer(Serializer): def serialize(self, instance,", "instance, attrs): return { 'id': instance.id.hex, 'job': { 'id': instance.job_id.hex,", "'id': instance.id.hex, 'job': { 'id': instance.job_id.hex, }, 'name': instance.name, 'step':", "LogSourceSerializer(Serializer): def serialize(self, instance, attrs): return { 'id': instance.id.hex, 'job':", "instance.id.hex, 'job': { 'id': instance.job_id.hex, }, 'name': instance.name, 'step': instance.step,", "changes.models.log import LogSource @register(LogSource) class LogSourceSerializer(Serializer): def serialize(self, instance, attrs):" ]
[ "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "NVIDIA CORPORATION. All rights reserved. # # Licensed under the", "reference image file for affine\") parser.add_argument(\"--input_path\", help=\"Input nrrd path\", type=str)", "img = nib.load(args.reference_path) img_affine = img.affine nrrd = nrrd.read(args.input_path) data", "argparse import nibabel as nib import nrrd import numpy as", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "2.0 (the \"License\"); # you may not use this file", "agreed to in writing, software # distributed under the License", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "Unless required by applicable law or agreed to in writing,", "# limitations under the License. import argparse import nibabel as", "parser.parse_args() img = nib.load(args.reference_path) img_affine = img.affine nrrd = nrrd.read(args.input_path)", "distributed under the License is distributed on an \"AS IS\"", "args = parser.parse_args() img = nib.load(args.reference_path) img_affine = img.affine nrrd", "img_affine = img.affine nrrd = nrrd.read(args.input_path) data = np.flip(nrrd[0], axis=1)", "type=str) parser.add_argument(\"--reference_path\", help=\"Reference image path\", type=str) parser.add_argument(\"--output_path\", help=\"Output nifti path\",", "the specific language governing permissions and # limitations under the", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "except in compliance with the License. # You may obtain", "governing permissions and # limitations under the License. 
import argparse", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "image file for affine\") parser.add_argument(\"--input_path\", help=\"Input nrrd path\", type=str) parser.add_argument(\"--reference_path\",", "rights reserved. # # Licensed under the Apache License, Version", "parser.add_argument(\"--reference_path\", help=\"Reference image path\", type=str) parser.add_argument(\"--output_path\", help=\"Output nifti path\", type=str)", "path\", type=str) args = parser.parse_args() img = nib.load(args.reference_path) img_affine =", "writing, software # distributed under the License is distributed on", "in writing, software # distributed under the License is distributed", "you may not use this file except in compliance with", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "language governing permissions and # limitations under the License. import", "nrrd path\", type=str) parser.add_argument(\"--reference_path\", help=\"Reference image path\", type=str) parser.add_argument(\"--output_path\", help=\"Output", "nib import nrrd import numpy as np parser = argparse.ArgumentParser(\"Convert", "nrrd.read(args.input_path) data = np.flip(nrrd[0], axis=1) nft_img = nib.Nifti1Image(data, img_affine) nib.save(nft_img,", "import argparse import nibabel as nib import nrrd import numpy", "use this file except in compliance with the License. #", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "= argparse.ArgumentParser(\"Convert nrrd label to nifti with reference image file", "reserved. 
# # Licensed under the Apache License, Version 2.0", "= nrrd.read(args.input_path) data = np.flip(nrrd[0], axis=1) nft_img = nib.Nifti1Image(data, img_affine)", "nifti with reference image file for affine\") parser.add_argument(\"--input_path\", help=\"Input nrrd", "CONDITIONS OF ANY KIND, either express or implied. # See", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "nrrd import numpy as np parser = argparse.ArgumentParser(\"Convert nrrd label", "or implied. # See the License for the specific language", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License. # You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "affine\") parser.add_argument(\"--input_path\", help=\"Input nrrd path\", type=str) parser.add_argument(\"--reference_path\", help=\"Reference image path\",", "import nibabel as nib import nrrd import numpy as np", "# You may obtain a copy of the License at", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "with reference image file for affine\") parser.add_argument(\"--input_path\", help=\"Input nrrd path\",", "under the License is distributed on an \"AS IS\" BASIS,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "License for the specific language governing permissions and # limitations", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "limitations under the License. import argparse import nibabel as nib", "parser.add_argument(\"--input_path\", help=\"Input nrrd path\", type=str) parser.add_argument(\"--reference_path\", help=\"Reference image path\", type=str)", "CORPORATION. All rights reserved. # # Licensed under the Apache", "and # limitations under the License. 
import argparse import nibabel", "help=\"Output nifti path\", type=str) args = parser.parse_args() img = nib.load(args.reference_path)", "as np parser = argparse.ArgumentParser(\"Convert nrrd label to nifti with", "the License for the specific language governing permissions and #", "nib.load(args.reference_path) img_affine = img.affine nrrd = nrrd.read(args.input_path) data = np.flip(nrrd[0],", "(the \"License\"); # you may not use this file except", "type=str) args = parser.parse_args() img = nib.load(args.reference_path) img_affine = img.affine", "Apache License, Version 2.0 (the \"License\"); # you may not", "# you may not use this file except in compliance", "<filename>examples/prostate/data_preparation/utils/nrrd_to_nifti.py # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.", "either express or implied. # See the License for the", "# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved. #", "OR CONDITIONS OF ANY KIND, either express or implied. #", "Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved. # #", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "the License is distributed on an \"AS IS\" BASIS, #", "in compliance with the License. # You may obtain a", "numpy as np parser = argparse.ArgumentParser(\"Convert nrrd label to nifti", "import numpy as np parser = argparse.ArgumentParser(\"Convert nrrd label to", "type=str) parser.add_argument(\"--output_path\", help=\"Output nifti path\", type=str) args = parser.parse_args() img", "image path\", type=str) parser.add_argument(\"--output_path\", help=\"Output nifti path\", type=str) args =", "2021-2022, NVIDIA CORPORATION. All rights reserved. 
# # Licensed under", "software # distributed under the License is distributed on an", "parser.add_argument(\"--output_path\", help=\"Output nifti path\", type=str) args = parser.parse_args() img =", "img.affine nrrd = nrrd.read(args.input_path) data = np.flip(nrrd[0], axis=1) nft_img =", "under the License. import argparse import nibabel as nib import", "help=\"Reference image path\", type=str) parser.add_argument(\"--output_path\", help=\"Output nifti path\", type=str) args", "nrrd label to nifti with reference image file for affine\")", "as nib import nrrd import numpy as np parser =", "# # Unless required by applicable law or agreed to", "np parser = argparse.ArgumentParser(\"Convert nrrd label to nifti with reference", "= img.affine nrrd = nrrd.read(args.input_path) data = np.flip(nrrd[0], axis=1) nft_img", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "help=\"Input nrrd path\", type=str) parser.add_argument(\"--reference_path\", help=\"Reference image path\", type=str) parser.add_argument(\"--output_path\",", "Version 2.0 (the \"License\"); # you may not use this", "label to nifti with reference image file for affine\") parser.add_argument(\"--input_path\",", "= nib.load(args.reference_path) img_affine = img.affine nrrd = nrrd.read(args.input_path) data =", "law or agreed to in writing, software # distributed under", "(c) 2021-2022, NVIDIA CORPORATION. All rights reserved. # # Licensed", "= parser.parse_args() img = nib.load(args.reference_path) img_affine = img.affine nrrd =", "the License. import argparse import nibabel as nib import nrrd", "nrrd = nrrd.read(args.input_path) data = np.flip(nrrd[0], axis=1) nft_img = nib.Nifti1Image(data,", "argparse.ArgumentParser(\"Convert nrrd label to nifti with reference image file for", "implied. 
# See the License for the specific language governing", "under the Apache License, Version 2.0 (the \"License\"); # you", "\"License\"); # you may not use this file except in", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "License. import argparse import nibabel as nib import nrrd import", "All rights reserved. # # Licensed under the Apache License,", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "to in writing, software # distributed under the License is", "for affine\") parser.add_argument(\"--input_path\", help=\"Input nrrd path\", type=str) parser.add_argument(\"--reference_path\", help=\"Reference image", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "# See the License for the specific language governing permissions", "You may obtain a copy of the License at #", "path\", type=str) parser.add_argument(\"--reference_path\", help=\"Reference image path\", type=str) parser.add_argument(\"--output_path\", help=\"Output nifti", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "nibabel as nib import nrrd import numpy as np parser", "required by applicable law or agreed to in writing, software", "import nrrd import numpy as np parser = argparse.ArgumentParser(\"Convert nrrd", "data = 
np.flip(nrrd[0], axis=1) nft_img = nib.Nifti1Image(data, img_affine) nib.save(nft_img, args.output_path)", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "path\", type=str) parser.add_argument(\"--output_path\", help=\"Output nifti path\", type=str) args = parser.parse_args()", "with the License. # You may obtain a copy of", "this file except in compliance with the License. # You", "parser = argparse.ArgumentParser(\"Convert nrrd label to nifti with reference image", "to nifti with reference image file for affine\") parser.add_argument(\"--input_path\", help=\"Input", "the Apache License, Version 2.0 (the \"License\"); # you may", "permissions and # limitations under the License. import argparse import", "file for affine\") parser.add_argument(\"--input_path\", help=\"Input nrrd path\", type=str) parser.add_argument(\"--reference_path\", help=\"Reference", "nifti path\", type=str) args = parser.parse_args() img = nib.load(args.reference_path) img_affine" ]
[ "- Production/Stable', 'Environment :: Console', 'Operating System :: OS Independent',", "setup import io import os import re version_re = re.compile(r'^__version__", "f: line = line.rstrip() m = version_re.match(line) if m: version", "= \"([^\"]*)\"$') # Find the version number. with open('rst2ctags.py', 'r')", ":: Software Development', 'Topic :: Text Processing', 'Topic :: Text", "'Topic :: Text Processing :: Indexing', 'Topic :: Utilities', ]", "line = line.rstrip() m = version_re.match(line) if m: version =", "m.group(1) break else: raise RuntimeError(\"Couldn't find version string in rst2ctags.py\")", "a ' 'reStructuredText document.', long_description=long_description, license='BSD', author='<NAME>', author_email='<EMAIL>', url='https://github.com/jszakmeister/rst2ctags', version=version,", "re version_re = re.compile(r'^__version__ = \"([^\"]*)\"$') # Find the version", "zip_safe=True, entry_points={ 'console_scripts': [ 'rst2ctags = rst2ctags:cli_main', ], }, classifiers=[", "description='Generates ctags-compatible output for the sections of a ' 'reStructuredText", "os import re version_re = re.compile(r'^__version__ = \"([^\"]*)\"$') # Find", "as f: for line in f: line = line.rstrip() m", "line in f: line = line.rstrip() m = version_re.match(line) if", "OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language", "'rst2ctags = rst2ctags:cli_main', ], }, classifiers=[ 'License :: OSI Approved", "m = version_re.match(line) if m: version = m.group(1) break else:", "string in rst2ctags.py\") # Load the description. 
readme_path = os.path.join(os.path.dirname(__file__),", "setuptools import setup import io import os import re version_re", "for the sections of a ' 'reStructuredText document.', long_description=long_description, license='BSD',", "version_re = re.compile(r'^__version__ = \"([^\"]*)\"$') # Find the version number.", "License', 'Development Status :: 5 - Production/Stable', 'Environment :: Console',", ":: Python :: 2.7', 'Programming Language :: Python :: 3',", ":: 5 - Production/Stable', 'Environment :: Console', 'Operating System ::", "ctags-compatible output for the sections of a ' 'reStructuredText document.',", ":: 2.7', 'Programming Language :: Python :: 3', 'Topic ::", "3', 'Topic :: Software Development', 'Topic :: Text Processing', 'Topic", "with open('rst2ctags.py', 'r') as f: for line in f: line", "classifiers=[ 'License :: OSI Approved :: BSD License', 'Development Status", "'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Operating", "py_modules=['rst2ctags'], zip_safe=True, entry_points={ 'console_scripts': [ 'rst2ctags = rst2ctags:cli_main', ], },", "Console', 'Operating System :: OS Independent', 'Programming Language :: Python", "Python :: 2.7', 'Programming Language :: Python :: 3', 'Topic", "'License :: OSI Approved :: BSD License', 'Development Status ::", "raise RuntimeError(\"Couldn't find version string in rst2ctags.py\") # Load the", "'r') as f: for line in f: line = line.rstrip()", "find version string in rst2ctags.py\") # Load the description. 
readme_path", ":: Text Processing :: Indexing', 'Topic :: Utilities', ] )", ":: OSI Approved :: BSD License', 'Development Status :: 5", "Text Processing', 'Topic :: Text Processing :: Indexing', 'Topic ::", "= f.read() setup( name='rst2ctags', description='Generates ctags-compatible output for the sections", "rst2ctags:cli_main', ], }, classifiers=[ 'License :: OSI Approved :: BSD", "Production/Stable', 'Environment :: Console', 'Operating System :: OS Independent', 'Programming", "else: raise RuntimeError(\"Couldn't find version string in rst2ctags.py\") # Load", "Language :: Python :: 2.7', 'Programming Language :: Python ::", "Python :: 3', 'Topic :: Software Development', 'Topic :: Text", "= version_re.match(line) if m: version = m.group(1) break else: raise", "for line in f: line = line.rstrip() m = version_re.match(line)", "'console_scripts': [ 'rst2ctags = rst2ctags:cli_main', ], }, classifiers=[ 'License ::", "5 - Production/Stable', 'Environment :: Console', 'Operating System :: OS", "Approved :: BSD License', 'Development Status :: 5 - Production/Stable',", "io import os import re version_re = re.compile(r'^__version__ = \"([^\"]*)\"$')", "f: for line in f: line = line.rstrip() m =", "line.rstrip() m = version_re.match(line) if m: version = m.group(1) break", "Software Development', 'Topic :: Text Processing', 'Topic :: Text Processing", "io.open(readme_path, encoding='utf-8') as f: long_description = f.read() setup( name='rst2ctags', description='Generates", "Development', 'Topic :: Text Processing', 'Topic :: Text Processing ::", "entry_points={ 'console_scripts': [ 'rst2ctags = rst2ctags:cli_main', ], }, classifiers=[ 'License", "= rst2ctags:cli_main', ], }, classifiers=[ 'License :: OSI Approved ::", "the description. 
readme_path = os.path.join(os.path.dirname(__file__), 'README.rst') with io.open(readme_path, encoding='utf-8') as", "author_email='<EMAIL>', url='https://github.com/jszakmeister/rst2ctags', version=version, py_modules=['rst2ctags'], zip_safe=True, entry_points={ 'console_scripts': [ 'rst2ctags =", "'Topic :: Text Processing', 'Topic :: Text Processing :: Indexing',", "'Operating System :: OS Independent', 'Programming Language :: Python ::", "in f: line = line.rstrip() m = version_re.match(line) if m:", "url='https://github.com/jszakmeister/rst2ctags', version=version, py_modules=['rst2ctags'], zip_safe=True, entry_points={ 'console_scripts': [ 'rst2ctags = rst2ctags:cli_main',", "}, classifiers=[ 'License :: OSI Approved :: BSD License', 'Development", "Status :: 5 - Production/Stable', 'Environment :: Console', 'Operating System", "import io import os import re version_re = re.compile(r'^__version__ =", "'Programming Language :: Python :: 2.7', 'Programming Language :: Python", "open('rst2ctags.py', 'r') as f: for line in f: line =", "import os import re version_re = re.compile(r'^__version__ = \"([^\"]*)\"$') #", "if m: version = m.group(1) break else: raise RuntimeError(\"Couldn't find", "'reStructuredText document.', long_description=long_description, license='BSD', author='<NAME>', author_email='<EMAIL>', url='https://github.com/jszakmeister/rst2ctags', version=version, py_modules=['rst2ctags'], zip_safe=True,", "Find the version number. with open('rst2ctags.py', 'r') as f: for", "in rst2ctags.py\") # Load the description. readme_path = os.path.join(os.path.dirname(__file__), 'README.rst')", "version number. with open('rst2ctags.py', 'r') as f: for line in", "rst2ctags.py\") # Load the description. 
readme_path = os.path.join(os.path.dirname(__file__), 'README.rst') with", "= os.path.join(os.path.dirname(__file__), 'README.rst') with io.open(readme_path, encoding='utf-8') as f: long_description =", "RuntimeError(\"Couldn't find version string in rst2ctags.py\") # Load the description.", "Language :: Python :: 3', 'Topic :: Software Development', 'Topic", "break else: raise RuntimeError(\"Couldn't find version string in rst2ctags.py\") #", "output for the sections of a ' 'reStructuredText document.', long_description=long_description,", "long_description=long_description, license='BSD', author='<NAME>', author_email='<EMAIL>', url='https://github.com/jszakmeister/rst2ctags', version=version, py_modules=['rst2ctags'], zip_safe=True, entry_points={ 'console_scripts':", "long_description = f.read() setup( name='rst2ctags', description='Generates ctags-compatible output for the", "System :: OS Independent', 'Programming Language :: Python :: 2.7',", "name='rst2ctags', description='Generates ctags-compatible output for the sections of a '", "number. with open('rst2ctags.py', 'r') as f: for line in f:", "'README.rst') with io.open(readme_path, encoding='utf-8') as f: long_description = f.read() setup(", "= re.compile(r'^__version__ = \"([^\"]*)\"$') # Find the version number. with", "import setup import io import os import re version_re =", "readme_path = os.path.join(os.path.dirname(__file__), 'README.rst') with io.open(readme_path, encoding='utf-8') as f: long_description", ":: 3', 'Topic :: Software Development', 'Topic :: Text Processing',", "], }, classifiers=[ 'License :: OSI Approved :: BSD License',", ":: Console', 'Operating System :: OS Independent', 'Programming Language ::", "encoding='utf-8') as f: long_description = f.read() setup( name='rst2ctags', description='Generates ctags-compatible", "description. 
readme_path = os.path.join(os.path.dirname(__file__), 'README.rst') with io.open(readme_path, encoding='utf-8') as f:", "sections of a ' 'reStructuredText document.', long_description=long_description, license='BSD', author='<NAME>', author_email='<EMAIL>',", "re.compile(r'^__version__ = \"([^\"]*)\"$') # Find the version number. with open('rst2ctags.py',", "\"([^\"]*)\"$') # Find the version number. with open('rst2ctags.py', 'r') as", "# Find the version number. with open('rst2ctags.py', 'r') as f:", "version = m.group(1) break else: raise RuntimeError(\"Couldn't find version string", "version=version, py_modules=['rst2ctags'], zip_safe=True, entry_points={ 'console_scripts': [ 'rst2ctags = rst2ctags:cli_main', ],", "2.7', 'Programming Language :: Python :: 3', 'Topic :: Software", ":: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming", "'Programming Language :: Python :: 3', 'Topic :: Software Development',", "of a ' 'reStructuredText document.', long_description=long_description, license='BSD', author='<NAME>', author_email='<EMAIL>', url='https://github.com/jszakmeister/rst2ctags',", "with io.open(readme_path, encoding='utf-8') as f: long_description = f.read() setup( name='rst2ctags',", "author='<NAME>', author_email='<EMAIL>', url='https://github.com/jszakmeister/rst2ctags', version=version, py_modules=['rst2ctags'], zip_safe=True, entry_points={ 'console_scripts': [ 'rst2ctags", "import re version_re = re.compile(r'^__version__ = \"([^\"]*)\"$') # Find the", ":: BSD License', 'Development Status :: 5 - Production/Stable', 'Environment", "the version number. 
with open('rst2ctags.py', 'r') as f: for line", "BSD License', 'Development Status :: 5 - Production/Stable', 'Environment ::", "'Environment :: Console', 'Operating System :: OS Independent', 'Programming Language", ":: Python :: 3', 'Topic :: Software Development', 'Topic ::", "'Topic :: Software Development', 'Topic :: Text Processing', 'Topic ::", "setup( name='rst2ctags', description='Generates ctags-compatible output for the sections of a", "license='BSD', author='<NAME>', author_email='<EMAIL>', url='https://github.com/jszakmeister/rst2ctags', version=version, py_modules=['rst2ctags'], zip_safe=True, entry_points={ 'console_scripts': [", "[ 'rst2ctags = rst2ctags:cli_main', ], }, classifiers=[ 'License :: OSI", "Independent', 'Programming Language :: Python :: 2.7', 'Programming Language ::", "= line.rstrip() m = version_re.match(line) if m: version = m.group(1)", "= m.group(1) break else: raise RuntimeError(\"Couldn't find version string in", "from setuptools import setup import io import os import re", "OSI Approved :: BSD License', 'Development Status :: 5 -", "os.path.join(os.path.dirname(__file__), 'README.rst') with io.open(readme_path, encoding='utf-8') as f: long_description = f.read()", "<reponame>jszakmeister/rst2ctags from setuptools import setup import io import os import", "the sections of a ' 'reStructuredText document.', long_description=long_description, license='BSD', author='<NAME>',", "version_re.match(line) if m: version = m.group(1) break else: raise RuntimeError(\"Couldn't", "f.read() setup( name='rst2ctags', description='Generates ctags-compatible output for the sections of", "as f: long_description = f.read() setup( name='rst2ctags', description='Generates ctags-compatible output", "document.', long_description=long_description, license='BSD', author='<NAME>', author_email='<EMAIL>', url='https://github.com/jszakmeister/rst2ctags', version=version, py_modules=['rst2ctags'], zip_safe=True, entry_points={", "Load the description. 
readme_path = os.path.join(os.path.dirname(__file__), 'README.rst') with io.open(readme_path, encoding='utf-8')", ":: Text Processing', 'Topic :: Text Processing :: Indexing', 'Topic", "f: long_description = f.read() setup( name='rst2ctags', description='Generates ctags-compatible output for", "Processing', 'Topic :: Text Processing :: Indexing', 'Topic :: Utilities',", "version string in rst2ctags.py\") # Load the description. readme_path =", "m: version = m.group(1) break else: raise RuntimeError(\"Couldn't find version", "' 'reStructuredText document.', long_description=long_description, license='BSD', author='<NAME>', author_email='<EMAIL>', url='https://github.com/jszakmeister/rst2ctags', version=version, py_modules=['rst2ctags'],", "# Load the description. readme_path = os.path.join(os.path.dirname(__file__), 'README.rst') with io.open(readme_path," ]
[ "version number and exit.') help_parser = subparsers.add_parser('help', help='print this help", "'yaml' print(yaml.dump(findings, default_flow_style=False)) elif argv_parsed.command == 'attach-camera': config, indices, rc", "argv_parsed.add_init_inside is not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if", "status_parser = subparsers.add_parser('status', description=status_commanddesc, help=status_commanddesc) status_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id", "[y/N] ', end='') ui_input = input().lower() if ui_input in ('n',", "ConnectionError from .core import WorkspaceInstance from .mgmt import get_local_config, add_key,", "2.0 (the \"License\"); # you may not use this file", "None, None if argv_parsed.attach_camera_crop_config: crop = json.loads(argv_parsed.attach_camera_crop_config) else: crop =", "active,' ' but there is not one or it is", "to finish' terminate_parser = subparsers.add_parser('stop-ad', description=terminate_commanddesc, help=terminate_commanddesc) terminate_parser.add_argument('id_prefix', metavar='ID', nargs='?',", "camera_main(wdeployments, tok=tok, dev=argv_parsed.camera, width=width, height=height, crop=crop) except ConnectionError: if not", "access via WebSockets', dest='add_addon_cmdsh') addon_cmdsh_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on cmdsh',", "ac.check_registration(argv_parsed.id_prefix) except: print('Error occurred while contacting remote server ' 'at", "commands to execute; ' 'for example, ' 'copy-and-paste value shown", "from local configuration, if present rm_wd(get_local_config(), wdid, save=True) elif argv_parsed.command", "captured images; ' 'default depends on the supporting drivers')) attach_camera_parser.add_argument('--crop',", "be unique prefix); ' 'this argument is not required '", "launch.') pkglogger.removeHandler(loghandler) if argv_parsed.become_daemon: if os.fork() != 0: return 0", "container') 
config_parser.add_argument('--add-init-inside', metavar='CMD', type=str, dest='add_init_inside', default=None, help='add command to be", "ConnectionError: print('ERROR: failed to reach server. Are you connected to", "# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "idp) if index is None: print('ERROR: given prefix does not", "return 1 try: print(ac.register_new(at_most_one=argv_parsed.register_at_most_one)) except HSError as err: print('ERROR: {}'.format(err))", "'version': from . import __version__ as hardshare_pkg_version print(hardshare_pkg_version) return 0", "find_wd(config, idp) if index is None: print('ERROR: given prefix does", "the server indicates that an instance is active,' ' but", "description=addon_vnc_commanddesc, help=addon_vnc_commanddesc) addon_vnc_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment'", "== 'config': config_parser.print_help() elif argv_parsed.help_target_command == 'rules': rules_parser.print_help() elif argv_parsed.help_target_command", "'for example, ' 'copy-and-paste value shown in `hardshare config -l`", "by anyone? 
[y/N] ', end='') ui_input = input().lower() if ui_input", "add-on vnc to enable VNC via rerobots.net', dest='add_addon_vnc') addon_vnc_parser.add_argument('--rm', action='store_true',", "''): return 1 try: ac.add_access_rule(wdid, to_user='*') except Exception as err:", "workspace deployments' addon_vnc_parser = subparsers.add_parser('addon-vnc', description=addon_vnc_commanddesc, help=addon_vnc_commanddesc) addon_vnc_parser.add_argument('id_prefix', metavar='ID', nargs='?',", "dev=argv_parsed.camera, width=width, height=height, crop=crop) except ConnectionError: if not argv_parsed.become_daemon: print('ERROR:", "is denied unless a rule explicitly permits it.', ] if", "return 1 else: # cprovider == 'proxy' print('ERROR: --assign-image not", "path does not exist') return 1 config['wdeployments'][index]['terminate'].append(normalized_path) modify_local(config) elif argv_parsed.rm_terminate_prog", "default_flow_style=False)) elif argv_parsed.drop_all_rules or argv_parsed.add_rule_permit_me: try: if argv_parsed.drop_all_rules: ac.drop_access_rules(wdid) elif", "as fp: tok = fp.read().strip() try: if argv_parsed.add_addon_vnc: add_vnc(wdeployment_id, tok)", "= subparsers.add_parser('addon-mistyproxy', description=addon_mistyproxy_commanddesc, help=addon_mistyproxy_commanddesc) addon_mistyproxy_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of", "except: print('Error occurred while contacting rerobots servers') print('Try config -l", "= get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc config['wdeployments'][index]['cprovider'] =", "1 return 0 else: print('failed to detect local instance') return", "None if argv_parsed.command == 'status': try: config = get_local_config() except:", "FileNotFoundError: print('ERROR: cannot reach daemon. Does it exist? 
(Try `hardshare", "Internet?') return 1 elif argv_parsed.command == 'stop-cameras': local_keys = list_local_keys()", "!= 0: return rc wdeployment_id = config['wdeployments'][index]['id'] local_keys = list_local_keys()", "== 'dissolve': if ac is None: print('no local configuration found.", "exit.') help_parser = subparsers.add_parser('help', help='print this help message and exit')", "= os.path.abspath(argv_parsed.add_terminate_prog) if not os.path.exists(normalized_path): print('ERROR: given path does not", "rc config['wdeployments'][index]['cprovider'] = selected_cprovider if selected_cprovider == 'proxy': config['wdeployments'][index]['image'] =", "'--verbose', action='store_true', default=False, help='print verbose messages about actions by the", "print('ERROR: failed to reach server. Are you connected to the", "as' ' terminated and attempt local clean-up; this' ' command", "{}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside'].append(argv_parsed.add_init_inside) modify_local(config) elif argv_parsed.rm_init_inside: config, index, rc", "== 'stop-cameras': stop_cameras_parser.print_help() elif argv_parsed.help_target_command == 'addon-cmdsh': addon_cmdsh_parser.print_help() elif argv_parsed.help_target_command", "= [] for idp in id_prefix: index = find_wd(config, idp)", "daemon)', dest='become_daemon') attach_camera_commanddesc = 'attach camera stream to workspace deployments'", "only 1 workspace deployment')) addon_mistyproxy_parser.add_argument('--add', action='store_true', default=False, help='add add-on mistyproxy", "return 1 elif argv_parsed.add_rule_permit_all: ui_input = None while ui_input not", "if argv_parsed.targetaddr is None: print('--ip is required with --add') return", "the Internet?') return 1 elif argv_parsed.command == 'rules': if ac", "subprocess.run([cprovider, 'image', 'exists', argv_parsed.cprovider_img]) if cp_images.returncode != 0: print('ERROR: given", "= input().lower() if ui_input in ('n', 'no', ''): return 1", 
".api import HSAPIClient from .err import Error as HSError from", "default=False, dest='only_local_config', help='only show local configuration data') config_parser.add_argument('--include-dissolved', action='store_true', default=False,", "enter\\n\\n hardshare help addon-cmdsh') return 1 except ValueError as err:", "whether or not started on this host'), dest='all_cameras') addon_cmdsh_commanddesc =", "of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless", "token. Did it expire?') else: print(config['remote']['err']) return 1 if len(config['remote']['deployments'])", "local configuration data') config_parser.add_argument('--include-dissolved', action='store_true', default=False, dest='include_dissolved', help='include configuration data", "print('error loading configuration data. does it exist?') return 1 if", "print('Do you want to permit access by anyone? [y/N] ',", "'json': print(json.dumps(res)) else: # output_format == 'yaml' print(yaml.dump(res, default_flow_style=False)) elif", "'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside'].append(argv_parsed.add_init_inside) modify_local(config) elif argv_parsed.rm_init_inside:", "return config, None, 1 if isinstance(id_prefix, list): if len(id_prefix) ==", "deployment, making it' ' unavailable for any future use' '", "height of captured images; ' 'default depends on the supporting", "(the owner)') rules_parser.add_argument('--drop-all', action='store_true', default=False, dest='drop_all_rules', help=('remove all access rules;", "not in ['docker', 'podman', 'proxy']: print('ERROR: cprovider must be one", "found, then create one') config_parser.add_argument('--add-terminate-prog', metavar='PATH', dest='add_terminate_prog', default=None, help='add program", "License for the specific language governing permissions and # limitations", "rules') return 1 elif argv_parsed.command == 'check': if ac is", "== 'rules': 
rules_parser.print_help() elif argv_parsed.help_target_command == 'register': register_parser.print_help() elif argv_parsed.help_target_command", "try: ac.add_access_rule(wdid, to_user='*') except Exception as err: print('{}'.format(err)) return 1", "[int(x) for x in argv_parsed.attach_camera_res.split(',')] if width < 1 or", "permit access by anyone? [y/N] ', end='') ui_input = input().lower()", "else: print(config['remote']['err']) return 1 if len(config['remote']['deployments']) == 0: print('\\nno registered", "cp_images = subprocess.run([cprovider, 'image', 'inspect', argv_parsed.cprovider_img], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) if cp_images.returncode", "argv_parsed.rm_addon_mistyproxy: rm_mistyproxy(wdeployment_id, tok) else: print('Use `hardshare addon-mistyproxy` with a switch.')", "# selected_cprovider \\in {docker, podman} if config['wdeployments'][index]['image'] is None: config['wdeployments'][index]['image']", "cannot launch.') pkglogger.removeHandler(loghandler) if argv_parsed.become_daemon: if os.fork() != 0: return", "access rules (also known as capabilities or permissions)' rules_parser =", "else: crop = None if argv_parsed.become_daemon: if os.fork() != 0:", "return config, index, 0 def main(argv=None): pkglogger = logging.getLogger('hardshare') pkglogger.setLevel(logging.WARNING)", "as ConnectionError from .core import WorkspaceInstance from .mgmt import get_local_config,", "'manage add-on mistyproxy for your workspace deployments' addon_mistyproxy_parser = subparsers.add_parser('addon-mistyproxy',", "unless a rule explicitly permits it.', ] if output_format ==", "exit') help_parser.add_argument('help_target_command', metavar='COMMAND', type=str, nargs='?') config_commanddesc = 'manage local and", "'--list', action='store_true', default=False, dest='list_config', help='list configuration') config_parser.add_argument('--local', action='store_true', default=False, dest='only_local_config',", "if cprovider == 'proxy': 
print('--purge not supported for cprovider `proxy`')", "Exception as err: print('{}'.format(err)) return 1 elif argv_parsed.add_rule_permit_all: ui_input =", "owner)') rules_parser.add_argument('--drop-all', action='store_true', default=False, dest='drop_all_rules', help=('remove all access rules; '", "help=('stop all attached cameras associated with this ' 'user account,", "rc if argv_parsed.purge_supposed_instance: cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy':", "{} does not exist or ' 'has the wrong permissions.'.format(", "match precisely 1 workspace deployment') return config, None, 1 indices.append(index)", "tok=tok, dev=argv_parsed.camera, width=width, height=height, crop=crop) except ConnectionError: if not argv_parsed.become_daemon:", "print('Use `hardshare addon-mistyproxy` with a switch.') print('To get a help", "cprovider `proxy`') return 1 elif cprovider not in ['docker', 'podman']:", "= 'hardshare_client.{}.log'.format(config['wdeployments'][index]['id']) loghandler = logging.FileHandler(filename=logfname, mode='a', delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s)", "not in ['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 findings", "description=stop_cameras_commanddesc, help=stop_cameras_commanddesc) stop_cameras_parser.add_argument('-a', '--all', action='store_true', default=False, help=('stop all attached cameras", "default_flow_style=False)) else: if 'local' not in config: config = {", "= [WorkspaceInstance.inspect_instance()] else: findings = [] for wd in config['wdeployments']:", "pkglogger.addHandler(loghandler) return ac.run_sync(config['wdeployments'][index]['id']) elif argv_parsed.command == 'stop-ad': config, index, rc", "err: print('{}'.format(err)) return 1 if 'err' in res: if res['err']", "return rc carg = '--device={D}:{D}'.format(D=argv_parsed.remove_raw_device_path) 
config['wdeployments'][index]['cargs'].remove(carg) modify_local(config) elif argv_parsed.add_init_inside is", "loghandler = logging.handlers.WatchedFileHandler(filename='hardshare_client.log', mode='a', delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s) (pid: {});'", "config') return 1 return 0 if __name__ == '__main__': sys.exit(main(sys.argv[1:]))", "if selected_cprovider == 'proxy': config['wdeployments'][index]['image'] = None else: # selected_cprovider", "default=False, help='add add-on vnc to enable VNC via rerobots.net', dest='add_addon_vnc')", "' %(asctime)s ; %(message)s' .format(os.getpid()))) pkglogger.addHandler(loghandler) return ac.run_sync(config['wdeployments'][index]['id']) elif argv_parsed.command", "['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 findings = WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][index])", "action='store_true', default=False, dest='prune_err_keys', help=('delete files in local key directory that'", "Misty robots', dest='add_addon_mistyproxy') addon_mistyproxy_parser.add_argument('--ip', metavar='ADDRESS', default=None, help='IP address of the", "else: print('Use `hardshare rules` with a switch. For example, `hardshare", "via WebSockets', dest='add_addon_cmdsh') addon_cmdsh_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on cmdsh', dest='rm_addon_cmdsh')", "list_local_keys() if len(local_keys) < 1: print('No valid keys available. Check:", "deployment')) config_parser.add_argument('-c', '--create', action='store_true', default=False, dest='create_config', help='if no local configuration", "switch. 
For example, `hardshare config -l`') print('or to get a", "'exists', argv_parsed.cprovider_img]) if cp_images.returncode != 0: print('ERROR: given image name", "stop_cameras_commanddesc = 'stop camera streams previously started by attach-camera' stop_cameras_parser", "camera stream to workspace deployments' attach_camera_parser = subparsers.add_parser('attach-camera', description=attach_camera_commanddesc, help=attach_camera_commanddesc)", "is only 1 workspace deployment')) addon_mistyproxy_parser.add_argument('--add', action='store_true', default=False, help='add add-on", "dissolve_parser.print_help() elif argv_parsed.help_target_command == 'status': status_parser.print_help() elif argv_parsed.help_target_command == 'attach-camera':", "else: print('Use `hardshare addon-vnc` with a switch.') print('To get a", "stderr=subprocess.DEVNULL) except: print('failed to stop container `{}`'.format(findings['container']['name'])) return 1 return", "addon_cmdsh_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment' ' (can", "then mark it remotely as' ' terminated and attempt local", "json import logging import logging.handlers import os import os.path import", "print(res['err']) return 1 # Remove from local configuration, if present", "addon_cmdsh_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on cmdsh', dest='rm_addon_cmdsh') addon_vnc_commanddesc = 'manage", "== 0: if len(config['wdeployments']) > 1: print('ERROR: ambiguous command: more", "of workspace deployment on which to attach' ' (can be", "res = ac.check_registration(argv_parsed.id_prefix) except: print('Error occurred while contacting remote server", "argv_parsed.create_config: get_local_config(create_if_empty=True) elif argv_parsed.declared_wdeployment_id is not None: assert ac is", "os import os.path import subprocess import sys import uuid import", "workspace deployment defined.') return config, None, 1 index = [0]", 
"enter\\n\\n hardshare help rules') return 1 elif argv_parsed.command == 'check':", "argv_parsed.help_target_command is not None: if argv_parsed.help_target_command == 'config': config_parser.print_help() elif", "(advanced option)') config_parser.add_argument('--rm-raw-device', metavar='PATH', type=str, dest='remove_raw_device_path', default=None, help='remove device previously", "use' ' (THIS CANNOT BE UNDONE)') dissolve_parser = subparsers.add_parser('dissolve', description=dissolve_commanddesc,", "`hardshare addon-vnc` with a switch.') print('To get a help message,", "or not started on this host'), dest='all_cameras') addon_cmdsh_commanddesc = 'manage", "(try `hardshare config --create`)') return 1 config, index, rc =", "elif argv_parsed.command == 'addon-cmdsh': if ac is None: print('cannot register", "cprovider must be one of the following: docker, podman, proxy')", "open(local_keys[0], 'rt') as fp: tok = fp.read().strip() try: stop_cameras(tok, allcam=argv_parsed.all_cameras)", "want to permit access by anyone? [y/N] ', end='') ui_input", "is None: print('--ip is required with --add') return 1 add_mistyproxy(wdeployment_id,", "(THIS CANNOT BE UNDONE)') dissolve_parser = subparsers.add_parser('dissolve', description=dissolve_commanddesc, help=dissolve_commanddesc) dissolve_parser.add_argument('wdid',", "= None else: try: wdid = str(uuid.UUID(argv_parsed.id_prefix)) except: config, index,", "not recognized by cprovider') return 1 elif cprovider == 'docker':", "without initial local configuration.' 
' (try `hardshare config --create`)') return", "output_format = argv_parsed.output_format.lower() if output_format not in ['yaml', 'json']: print('output", "and height of captured images; ' 'default depends on the", "1 try: res = ac.dissolve_registration(wdid) except: print('Error occurred while contacting", "return 1 elif cprovider == 'docker': cp_images = subprocess.run([cprovider, 'image',", "already ' 'has wdeployment declared')) check_commanddesc = 'check registration of", "cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--purge not supported", "OF ANY KIND, either express or implied. # See the", "expire?') else: print(res['err']) return 1 res['comments'] = [ 'Access is", "rules; ' 'note that access is denied by default, '", "argv_parsed.become_daemon: if os.fork() != 0: return 0 os.close(0) os.close(1) os.close(2)", "target workspace deployment' ' (can be unique prefix)')) advertise_commanddesc =", "ref = config['local']['wdeployments'] else: ref = config['wdeployments'] for jj, wdeployment", "See the License for the specific language governing permissions and", "= config['wdeployments'] for jj, wdeployment in enumerate(ref): ref[jj]['url'] = 'https://rerobots.net/workspace/{}'.format(wdeployment['id'])", "example, `hardshare rules -l`') print('or to get a help message,", "to in writing, software # distributed under the License is", "local instance') return 1 else: if ac is None: print('cannot", "or argv_parsed.add_rule_permit_me: try: if argv_parsed.drop_all_rules: ac.drop_access_rules(wdid) elif argv_parsed.add_rule_permit_me: ac.add_access_rule(wdid) except", "yaml from aiohttp.client_exceptions import ClientConnectorError as ConnectionError from .core import", "config['wdeployments'][index]['cargs'].append(carg) modify_local(config) elif argv_parsed.remove_raw_device_path is not None: config, index, rc", "previously started by attach-camera' stop_cameras_parser = subparsers.add_parser('stop-cameras', 
description=stop_cameras_commanddesc, help=stop_cameras_commanddesc) stop_cameras_parser.add_argument('-a',", "help='remove device previously marked for inclusion in container') config_parser.add_argument('--add-init-inside', metavar='CMD',", "terminate_parser.add_argument('--purge', action='store_true', default=False, help=help_message_purge, dest='purge_supposed_instance') argv_parsed = argparser.parse_args(argv) if argv_parsed.print_version", "or agreed to in writing, software # distributed under the", "= subparsers.add_parser('stop-cameras', description=stop_cameras_commanddesc, help=stop_cameras_commanddesc) stop_cameras_parser.add_argument('-a', '--all', action='store_true', default=False, help=('stop all", "default=False, help=help_message_purge, dest='purge_supposed_instance') argv_parsed = argparser.parse_args(argv) if argv_parsed.print_version or argv_parsed.command", "1 with open(local_keys[0], 'rt') as fp: tok = fp.read().strip() try:", "with this ' 'user account, whether or not started on", "= get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc cprovider =", "be undone. 
' '[y/N] ').format(wdid), end='') ui_input = input().lower() if", "not in ['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside'].append(argv_parsed.add_init_inside)", "add-on vnc', dest='rm_addon_vnc') addon_mistyproxy_commanddesc = 'manage add-on mistyproxy for your", "of target workspace deployment' ' (can be unique prefix); '", "cameras associated with this ' 'user account, whether or not", "workspace deployments with this user account:') for wd in config['remote']['deployments']:", "more than 1 workspace deployment defined.') return config, None, 1", "['docker', 'podman']: print('\\timg: {}'.format(wdeployment['image'])) if wdeployment['terminate']: print('\\tterminate:') for terminate_p in", "= config['local']['wdeployments'] else: ref = config['wdeployments'] for jj, wdeployment in", "1 if 'err' in res: if res['err'] == 'not found':", "proxy') return 1 config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc", "['docker', 'podman', 'proxy']: print('ERROR: cprovider must be one of the", "} print('workspace deployments defined in local configuration:') if len(config['local']['wdeployments']) ==", "list_local_keys from .mgmt import find_wd, modify_local, rm_wd from .api import", "contacting rerobots servers') print('Try config -l --local to only get", "is active,' ' but there is not one or it", "key') config_parser.add_argument('--add-ssh-path', metavar='PATH', dest='new_ssh_path', help='add path to SSH key pair", "obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0", "action='store_true', default=False, help=('if there is an active instance, then' '", "not in ['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 if", "['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside'] = []", "argv_parsed.rm_init_inside: config, index, rc = 
get_config_with_index(argv_parsed.id_prefix) if rc != 0:", "compliance with the License. # You may obtain a copy", "' for the hardshare client'), add_help=False) argparser.add_argument('-h', '--help', dest='print_help', action='store_true',", "'--version', action='store_true', default=False, help='print version of hardshare (this) package.', dest='print_version')", "len(config['wdeployments']) == 0: print(('ERROR: no workspace deployment in local configuration.'))", "= fp.read().strip() try: if argv_parsed.add_addon_mistyproxy: if argv_parsed.targetaddr is None: print('--ip", "= subparsers.add_parser('check', description=check_commanddesc, help=check_commanddesc) check_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of", "number and exit.') help_parser = subparsers.add_parser('help', help='print this help message", "metavar='PATH', dest='add_terminate_prog', default=None, help='add program to list of commands to", "and # limitations under the License. \"\"\"Command-line interface \"\"\" import", "crop = json.loads(argv_parsed.attach_camera_crop_config) else: crop = None if argv_parsed.become_daemon: if", "if len(config['wdeployments']) == 0: print(('ERROR: no workspace deployment in local", "print('\\torigin (address) of registration: {}' .format(wd['origin'])) if wd['dissolved']: print('\\tdissolved: {}'.format(wd['dissolved']))", "possible keys with errors:') for err_key_path, err in config['local']['err_keys'].items(): print('\\t", "switch.') print('To get a help message, enter\\n\\n hardshare help addon-vnc')", "(C) 2018 rerobots, Inc. 
# # Licensed under the Apache", "a switch.') print('To get a help message, enter\\n\\n hardshare help", "input().lower() if ui_input in ('n', 'no', ''): return 1 try:", "width < 1 or height < 1: print('Width, height must", "does not exist') return 1 carg = '--device={D}:{D}'.format(D=argv_parsed.raw_device_path) config['wdeployments'][index]['cargs'].append(carg) modify_local(config)", "wdeployment['id'], wdeployment['url'], wdeployment['owner'], wdeployment['cprovider'], wdeployment['cargs'], )) if wdeployment['cprovider'] in ['docker',", "for your workspace deployments' addon_vnc_parser = subparsers.add_parser('addon-vnc', description=addon_vnc_commanddesc, help=addon_vnc_commanddesc) addon_vnc_parser.add_argument('id_prefix',", "cprovider `proxy`') return 1 config['wdeployments'][index]['image'] = argv_parsed.cprovider_img modify_local(config) elif argv_parsed.add_terminate_prog", "type=str, dest='attach_camera_res', default=None, help=('width and height of captured images; '", "that an instance is active,' ' but there is not", "version of hardshare (this) package.', dest='print_version') argparser.add_argument('-v', '--verbose', action='store_true', default=False,", "rc != 0: return rc if argv_parsed.purge_supposed_instance: cprovider = config['wdeployments'][index]['cprovider']", "not use this file except in compliance with the License.", "get a help message, enter\\n\\n hardshare help addon-cmdsh') return 1", "print('Use `hardshare config` with a switch. 
For example, `hardshare config", "1 if 'err' in res: if res['err'] == 'wrong authorization", "you may not use this file except in compliance with", "hardshare help config') return 1 return 0 if __name__ ==", "rc config['wdeployments'][index]['terminate'].remove(argv_parsed.rm_terminate_prog) modify_local(config) else: print('Use `hardshare config` with a switch.", "to detect local instance') return 1 else: if ac is", "to be executed inside container') config_parser.add_argument('--rm-init-inside', action='store_true', default=False, dest='rm_init_inside', help='remove", "it exist?') return 1 if argv_parsed.id_prefix is None: if len(config['wdeployments'])", "present rm_wd(get_local_config(), wdid, save=True) elif argv_parsed.command == 'config': if argv_parsed.list_config:", "started by attach-camera' stop_cameras_parser = subparsers.add_parser('stop-cameras', description=stop_cameras_commanddesc, help=stop_cameras_commanddesc) stop_cameras_parser.add_argument('-a', '--all',", "help=('id of workspace deployment to check' ' (can be unique", "workspace deployment in local configuration.')) return config, None, 1 if", "config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: print('The", "default=False, help='add add-on mistyproxy to allow HTTP proxy to Misty", "HSError from .addons import camera_main, stop_cameras from .addons import add_cmdsh,", "argv_parsed.command == 'addon-cmdsh': if ac is None: print('cannot register without", "findings: try: subprocess.check_call([cprovider, 'rm', '-f', findings['container']['name']], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) except: print('failed", "print('WARNING: local configuration does not declare SSH key.\\n' 'Instances with", "recognized by cprovider') return 1 elif cprovider == 'docker': cp_images", "attached cameras associated with this ' 'user account, whether or", "to attach' ' (can be unique prefix); ' 'this argument", "(%(levelname)s) (pid: {});' ' %(asctime)s ; %(message)s' 
.format(os.getpid()))) pkglogger.addHandler(loghandler) return", "argv_parsed.command == 'addon-vnc': if ac is None: print('cannot register without", "import camera_main, stop_cameras from .addons import add_cmdsh, rm_cmdsh, add_vnc, rm_vnc,", "id_prefix: index = find_wd(config, idp) if index is None: print('ERROR:", "'rt') as fp: tok = fp.read().strip() if argv_parsed.attach_camera_res: width, height", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "by default, ' 'including to you (the owner)')) rules_parser.add_argument('--permit-all', action='store_true',", "0: return rc if argv_parsed.purge_supposed_instance: cprovider = config['wdeployments'][index]['cprovider'] if cprovider", "else: print(res['err']) return 1 res['comments'] = [ 'Access is denied", "modify_local(config) elif argv_parsed.cprovider_img is not None: config, index, rc =", "import argparse import json import logging import logging.handlers import os", "help_message_purge = ('if the server indicates that an instance is", "default=None, help='remove device previously marked for inclusion in container') config_parser.add_argument('--add-init-inside',", "rc != 0: return rc wdeployment_id = config['wdeployments'][index]['id'] local_keys =", "= ('if the server indicates that an instance is active,'", "addon_cmdsh_parser.add_argument('--add', action='store_true', default=False, help='add add-on cmdsh to enable terminal access", "all rules') rules_parser.add_argument('--permit-me', action='store_true', default=False, dest='add_rule_permit_me', help='permit instantiations by you", "data.' ' does it exist? is it broken?') return 1", "argv_parsed.command == 'check': if ac is None: print('no local configuration", "server. 
Are you connected to the Internet?') return 1 elif", "docker, podman, proxy') return 1 config, index, rc = get_config_with_index(argv_parsed.id_prefix)", "cp_images.returncode != 0: print('ERROR: given image name is not recognized", "== 0: findings = [WorkspaceInstance.inspect_instance()] else: findings = [] for", "output_format not in ['yaml', 'json']: print('output format unrecognized: {}'.format(argv_parsed.output_format)) return", "action cannot be undone. ' '[y/N] ').format(wdid), end='') ui_input =", "no workspace deployment in local configuration.')) return config, None, 1", "ac.dissolve_registration(wdid) except: print('Error occurred while contacting remote server ' 'at", "`hardshare rules` with a switch. For example, `hardshare rules -l`')", "that' ' are not valid; to get list of' '", "for inclusion in container') config_parser.add_argument('--add-init-inside', metavar='CMD', type=str, dest='add_init_inside', default=None, help='add", "terminate_parser = subparsers.add_parser('stop-ad', description=terminate_commanddesc, help=terminate_commanddesc) terminate_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id", "it remotely as' ' terminated and attempt local clean-up; this'", "dissolve {}? This action cannot be undone. ' '[y/N] ').format(wdid),", "registration of more than 1 wdeployment; ' 'default is to", "os.path.exists(normalized_path): print('ERROR: given path does not exist') return 1 config['wdeployments'][index]['terminate'].append(normalized_path)", "['docker', 'podman', 'proxy']: print('unknown cprovider: {}'.format(cprovider)) return 1 if cprovider", "elif argv_parsed.help_target_command == 'stop-cameras': stop_cameras_parser.print_help() elif argv_parsed.help_target_command == 'addon-cmdsh': addon_cmdsh_parser.print_help()", "'wrong authorization token': print('wrong API token. 
Did it expire?') else:", "container provider: docker, podman, proxy') config_parser.add_argument('--assign-image', metavar='IMG', type=str, dest='cprovider_img', default=None,", "known as capabilities or permissions)' rules_parser = subparsers.add_parser('rules', description=rules_commanddesc, help=rules_commanddesc)", "= get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc carg =", "'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside'] = [] modify_local(config)", "`hardshare config --create`)') return 1 config, index, rc = get_config_with_index(argv_parsed.id_prefix)", "help='assign image for cprovider to use (advanced option)') config_parser.add_argument('--rm-raw-device', metavar='PATH',", "== 0: print('\\t(none)') else: for wdeployment in config['local']['wdeployments']: print('{}\\n\\turl: {}\\n\\towner:", "index, rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc", "ui_input in ('n', 'no', ''): return 1 try: res =", "} if 'local' in config: ref = config['local']['wdeployments'] else: ref", "1 elif argv_parsed.create_config: get_local_config(create_if_empty=True) elif argv_parsed.declared_wdeployment_id is not None: assert", "output_format == 'yaml': print(yaml.dump(config, default_flow_style=False)) else: if 'local' not in", "help='only show local configuration data') config_parser.add_argument('--include-dissolved', action='store_true', default=False, dest='include_dissolved', help='include", "instance is active,' ' but there is not one or", "default=False, dest='rm_init_inside', help='remove (empty) list of commands for inside initialization')", "# cprovider == 'proxy' print('ERROR: --assign-image not supported for cprovider", "%(asctime)s ; %(message)s' .format(os.getpid()))) pkglogger.addHandler(loghandler) if argv is None: argv", "with errors, try `--list`')) config_parser.add_argument('-l', '--list', action='store_true', default=False, dest='list_config', 
help='list", "while ui_input not in ('y', 'yes'): print('Do you want to", "commands to execute') config_parser.add_argument('--rm-terminate-prog', metavar='PATH', dest='rm_terminate_prog', default=None, help=('remove program from", "dest='prune_err_keys', help=('delete files in local key directory that' ' are", "`hardshare addon-cmdsh` with a switch.') print('To get a help message,", "findings = [WorkspaceInstance.inspect_instance()] else: findings = [] for wd in", "get a help message, enter\\n\\n hardshare help config') return 1", "user account:') for wd in config['remote']['deployments']: print('{}'.format(wd['id'])) print('\\tcreated: {}'.format(wd['date_created'])) if", "index is None: print('ERROR: given prefix does not match precisely", "default=0, type=int, help=('on Linux, 0 typically implies /dev/video0; ' 'if", "config_parser.add_argument('--add-init-inside', metavar='CMD', type=str, dest='add_init_inside', default=None, help='add command to be executed", "'copy-and-paste value shown in `hardshare config -l` here')) config_parser.add_argument('--add-key', metavar='FILE',", "config_parser.add_argument('--rm-init-inside', action='store_true', default=False, dest='rm_init_inside', help='remove (empty) list of commands for", "valid keys available. 
Check: `hardshare config -l`') return 1 with", "get a help message, enter\\n\\n hardshare help rules') return 1", "config_parser.add_argument('--rm-terminate-prog', metavar='PATH', dest='rm_terminate_prog', default=None, help=('remove program from list of commands", "action='store_true', default=False, dest='drop_all_rules', help=('remove all access rules; ' 'note that", "not supported for cprovider `proxy`') return 1 elif cprovider not", "permissions.'.format( argv_parsed.new_ssh_path, argv_parsed.new_ssh_path + '.pub' )) return 1 elif argv_parsed.create_config:", "try: res = ac.get_access_rules(wdid) except Exception as err: print('{}'.format(err)) return", "action='store_true', default=False, help='add add-on cmdsh to enable terminal access via", "WebSockets', dest='add_addon_cmdsh') addon_cmdsh_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on cmdsh', dest='rm_addon_cmdsh') addon_vnc_commanddesc", "and argv_parsed.help_target_command is not None: if argv_parsed.help_target_command == 'config': config_parser.print_help()", "of workspace deployment to dissolve') status_commanddesc = 'get status of", "special formatting); ' 'options: YAML , JSON'), dest='output_format') subparsers =", "'if there is only 1 workspace deployment')) attach_camera_parser.add_argument('--width-height', metavar='W,H', type=str,", "possibly new local config try: assert ac is not None", "at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "config['wdeployments'] for jj, wdeployment in enumerate(ref): ref[jj]['url'] = 'https://rerobots.net/workspace/{}'.format(wdeployment['id']) if", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "Does it exist? 
(Try `hardshare status`)') return 1 return 0", "description=addon_mistyproxy_commanddesc, help=addon_mistyproxy_commanddesc) addon_mistyproxy_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment'", "this workspace deployment, making it' ' unavailable for any future", "formatting); ' 'options: YAML , JSON'), dest='output_format') subparsers = argparser.add_subparsers(dest='command')", "' 'if there is only 1 workspace deployment')) addon_cmdsh_parser.add_argument('--add', action='store_true',", "os.path.abspath(argv_parsed.add_terminate_prog) if not os.path.exists(normalized_path): print('ERROR: given path does not exist')", "== 'json': print(json.dumps(res)) else: # output_format == 'yaml' print(yaml.dump(res, default_flow_style=False))", "from .api import HSAPIClient from .err import Error as HSError", "as err: print('ERROR: {}'.format(err)) return 1 elif argv_parsed.command == 'addon-vnc':", "default=False, help='print version of hardshare (this) package.', dest='print_version') argparser.add_argument('-v', '--verbose',", "all attached cameras associated with this ' 'user account, whether", "' 'this argument is not required ' 'if there is", "the Internet?') return 1 elif argv_parsed.command == 'stop-cameras': local_keys =", "switch. 
For example, `hardshare rules -l`') print('or to get a", "robot', dest='targetaddr') addon_mistyproxy_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on mistyproxy', dest='rm_addon_mistyproxy') terminate_commanddesc", "rm_wd(get_local_config(), wdid, save=True) elif argv_parsed.command == 'config': if argv_parsed.list_config: try:", "ac.declare_existing(argv_parsed.declared_wdeployment_id) ac.sync_config() elif argv_parsed.raw_device_path is not None: config, index, rc", "add_mistyproxy(wdeployment_id, tok, argv_parsed.targetaddr) elif argv_parsed.rm_addon_mistyproxy: rm_mistyproxy(wdeployment_id, tok) else: print('Use `hardshare", "try: if argv_parsed.drop_all_rules: ac.drop_access_rules(wdid) elif argv_parsed.add_rule_permit_me: ac.add_access_rule(wdid) except Exception as", "if argv_parsed.become_daemon: if os.fork() != 0: return 0 os.close(0) os.close(1)", "collect_errors=True) except: print('error loading configuration data.' ' does it exist?", "if len(id_prefix) == 0: if len(config['wdeployments']) > 1: print('ERROR: ambiguous", "https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "tok) elif argv_parsed.rm_addon_vnc: rm_vnc(wdeployment_id, tok) else: print('Use `hardshare addon-vnc` with", "key directory that' ' are not valid; to get list", "'if there is only 1 workspace deployment')) config_parser.add_argument('-c', '--create', action='store_true',", "image for cprovider to use (advanced option)') config_parser.add_argument('--rm-raw-device', metavar='PATH', type=str,", "occurred while contacting remote server ' 'at {}'.format(ac.base_uri)) return 1", "argv is None: argv = sys.argv[1:] argparser = argparse.ArgumentParser(description=('Command-line interface'", "default=False, dest='add_rule_permit_all', help='permit instantiations by anyone') register_commanddesc = 'register new", "errored_keys.items(): print('deleting {}...'.format(err_key_path)) os.unlink(err_key_path) elif 
argv_parsed.new_api_token: try: add_key(argv_parsed.new_api_token) except: print('failed", "description=terminate_commanddesc, help=terminate_commanddesc) terminate_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of target workspace", "subparsers.add_parser('status', description=status_commanddesc, help=status_commanddesc) status_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of target", "file except in compliance with the License. # You may", "return rc normalized_path = os.path.abspath(argv_parsed.add_terminate_prog) if not os.path.exists(normalized_path): print('ERROR: given", "metavar='CMD', type=str, dest='add_init_inside', default=None, help='add command to be executed inside", "attach_camera_parser.add_argument('id_prefix', metavar='ID', nargs='*', default=None, help=('id of workspace deployment on which", "== 'json': print(json.dumps(findings)) else: # output_format == 'yaml' print(yaml.dump(findings, default_flow_style=False))", "print('--purge not supported for cprovider `proxy`') return 1 elif cprovider", "cmdsh to enable terminal access via WebSockets', dest='add_addon_cmdsh') addon_cmdsh_parser.add_argument('--rm', action='store_true',", "Internet?') return 1 elif argv_parsed.command == 'rules': if ac is", "only 1 workspace deployment')) advertise_parser.add_argument('-d', '--daemon', action='store_true', default=False, help='detach from", "from .addons import add_cmdsh, rm_cmdsh, add_vnc, rm_vnc, add_mistyproxy, rm_mistyproxy def", "= ac.get_remote_config(include_dissolved=argv_parsed.include_dissolved) except HSError as err: print('Error: {}'.format(err)) return 1", "denied by default, ' 'including to you (the owner)')) rules_parser.add_argument('--permit-all',", "fp: tok = fp.read().strip() try: if argv_parsed.add_addon_cmdsh: add_cmdsh(wdeployment_id, tok) elif", "argv_parsed.remove_raw_device_path is not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if", 
"elif argv_parsed.prune_err_keys: _, errored_keys = list_local_keys(collect_errors=True) for err_key_path, err in", "'--prune', action='store_true', default=False, dest='prune_err_keys', help=('delete files in local key directory", "1 if len(config['wdeployments']) == 0: print(('ERROR: no workspace deployment in", "for idp in id_prefix: index = find_wd(config, idp) if index", "that access is denied by default, ' 'including to you", "' (try `hardshare config --create`)') return 1 config, index, rc", "= ac.dissolve_registration(wdid) except: print('Error occurred while contacting remote server '", "stderr=subprocess.DEVNULL) if cp_images.returncode != 0: print('ERROR: given image name is", "print('{}'.format(err)) return 1 elif argv_parsed.add_rule_permit_all: ui_input = None while ui_input", "rc != 0: return rc wdeployments = [config['wdeployments'][jj]['id'] for jj", "argv_parsed.command == 'register': if ac is None: print('cannot register without", "0: return rc config['wdeployments'][index]['cprovider'] = selected_cprovider if selected_cprovider == 'proxy':", "metavar='ID', nargs='?', default=None, help=('id of workspace deployment to advertise' '", "deployments defined in local configuration:') if len(config['local']['wdeployments']) == 0: print('\\t(none)')", "print('--rm-init-inside not supported for cprovider `proxy`') return 1 elif cprovider", "your workspace deployments' addon_cmdsh_parser = subparsers.add_parser('addon-cmdsh', description=addon_cmdsh_commanddesc, help=addon_cmdsh_commanddesc) addon_cmdsh_parser.add_argument('id_prefix', metavar='ID',", "== 'dissolve': dissolve_parser.print_help() elif argv_parsed.help_target_command == 'status': status_parser.print_help() elif argv_parsed.help_target_command", "print('\\t(none)') else: print('\\t' + '\\n\\t'.join(config['local']['keys'])) if 'err_keys' in config['local'] and", "found. 
(try `hardshare config -h`)') return 1 try: res =", "{}'.format(err, err_key_path)) if config['remote']: if 'err' in config['remote']: print('Error occurred", "deployment defined.') return config, None, 1 index = [0] else:", "'ad': advertise_parser.print_help() elif argv_parsed.help_target_command == 'stop-ad': terminate_parser.print_help() else: argparser.print_help() else:", "== 'wrong authorization token': print('wrong API token. Did it expire?')", "indices = [] for idp in id_prefix: index = find_wd(config,", "formatting (default is no special formatting); ' 'options: YAML ,", "default=True, dest='register_at_most_one', help=('permit registration of more than 1 wdeployment; '", "`hardshare config` with a switch. For example, `hardshare config -l`')", "argparser = argparse.ArgumentParser(description=('Command-line interface' ' for the hardshare client'), add_help=False)", "elif argv_parsed.declared_wdeployment_id is not None: assert ac is not None", "if argv is None: argv = sys.argv[1:] argparser = argparse.ArgumentParser(description=('Command-line", "not match precisely 1 workspace deployment') return config, None, 1", "'rt') as fp: tok = fp.read().strip() try: if argv_parsed.add_addon_mistyproxy: if", "rc normalized_path = os.path.abspath(argv_parsed.add_terminate_prog) if not os.path.exists(normalized_path): print('ERROR: given path", "{}'.format(err)) return 1 elif argv_parsed.command == 'ad': if ac is", "else: try: wdid = str(uuid.UUID(argv_parsed.id_prefix)) except: config, index, rc =", "# output_format == 'yaml' print(yaml.dump(findings, default_flow_style=False)) elif argv_parsed.command == 'attach-camera':", "KIND, either express or implied. 
# See the License for", "config_parser.add_argument('-p', '--prune', action='store_true', default=False, dest='prune_err_keys', help=('delete files in local key", "return rc cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--add-init-inside", "to the Internet?') return 1 elif argv_parsed.command == 'stop-cameras': local_keys", "= '--device={D}:{D}'.format(D=argv_parsed.remove_raw_device_path) config['wdeployments'][index]['cargs'].remove(carg) modify_local(config) elif argv_parsed.add_init_inside is not None: config,", "addon-cmdsh') return 1 except ValueError as err: print('ERROR: {}'.format(err)) return", "on the supporting drivers')) attach_camera_parser.add_argument('--crop', metavar='CROPCONFIG', type=str, dest='attach_camera_crop_config', default=None, help=('image", "help=('remove all access rules; ' 'note that access is denied", "config_parser.add_argument('--add-ssh-path', metavar='PATH', dest='new_ssh_path', help='add path to SSH key pair (does", "configuration.' ' (try `hardshare config --create`)') return 1 config, index,", "print('{}\\n\\turl: {}\\n\\towner: {}\\n\\tcprovider: {}\\n\\tcargs: {}'.format( wdeployment['id'], wdeployment['url'], wdeployment['owner'], wdeployment['cprovider'], wdeployment['cargs'],", "help=advertise_commanddesc) advertise_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment to", "deployment is' ' hosted here. 
(this only works if it'", "help='add command to be executed inside container') config_parser.add_argument('--rm-init-inside', action='store_true', default=False,", "explicitly permits it.', ] if output_format == 'json': print(json.dumps(res)) else:", "tok = fp.read().strip() try: if argv_parsed.add_addon_mistyproxy: if argv_parsed.targetaddr is None:", "'rm', '-f', findings['container']['name']], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) except: print('failed to stop container", "registered workspace deployments with this user account') else: print('\\nregistered workspace", "help addon-cmdsh') return 1 except ValueError as err: print('ERROR: {}'.format(err))", "add_help=False) argparser.add_argument('-h', '--help', dest='print_help', action='store_true', default=False, help='print this help message", "attach_camera_parser.add_argument('--crop', metavar='CROPCONFIG', type=str, dest='attach_camera_crop_config', default=None, help=('image crop configuration; ' 'default:", "else: print('Use `hardshare config` with a switch. For example, `hardshare", "rules -l`') print('or to get a help message, enter\\n\\n hardshare", "(the \"License\"); # you may not use this file except", "config['wdeployments']: findings.append(WorkspaceInstance.inspect_instance(wdeployment=wd)) else: findings = [] for m in find_wd(config,", "= 'https://rerobots.net/workspace/{}'.format(wdeployment['id']) if output_format == 'json': print(json.dumps(config)) elif output_format ==", "return rc wdeployments = [config['wdeployments'][jj]['id'] for jj in indices] local_keys", "if argv_parsed.print_version or argv_parsed.command == 'version': from . import __version__", "token': print('wrong API token. 
Did it expire?') else: print(config['remote']['err']) return", "does not exist or ' 'has the wrong permissions.'.format( argv_parsed.new_ssh_path,", "print('or to get a help message, enter\\n\\n hardshare help config')", "config, None, 1 if isinstance(id_prefix, list): if len(id_prefix) == 0:", "None, 1 index = 0 return config, index, 0 def", "for the hardshare client'), add_help=False) argparser.add_argument('-h', '--help', dest='print_help', action='store_true', default=False,", "1 workspace deployment')) addon_cmdsh_parser.add_argument('--add', action='store_true', default=False, help='add add-on cmdsh to", "new local config try: assert ac is not None remote_config", "in container') config_parser.add_argument('--add-init-inside', metavar='CMD', type=str, dest='add_init_inside', default=None, help='add command to", "subprocess.check_call([cprovider, 'rm', '-f', findings['container']['name']], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) except: print('failed to stop", "# # Unless required by applicable law or agreed to", "config -l`') return 1 with open(local_keys[0], 'rt') as fp: tok", "a help message, enter\\n\\n hardshare help rules') return 1 elif", "is denied by default, ' 'including to you (the owner)'))", "workspace deployment {}'.format(res['id'])) print('\\tcreated: {}'.format(res['date_created'])) print('\\torigin (address) of registration: {}'.format(res['origin']))", "(address) of registration: {}' .format(wd['origin'])) if wd['dissolved']: print('\\tdissolved: {}'.format(wd['dissolved'])) elif", "action='store_true', default=False, dest='include_dissolved', help='include configuration data of dissolved workspace deployments')", "if present rm_wd(get_local_config(), wdid, save=True) elif argv_parsed.command == 'config': if", "remotely as' ' terminated and attempt local clean-up; this' '", "print('cannot register without initial local configuration.' 
' (try `hardshare config", "import logging.handlers import os import os.path import subprocess import sys", "# Try to get remote config, given possibly new local", "following: docker, podman, proxy') return 1 config, index, rc =", "return 0 if argv_parsed.verbose: pkglogger.setLevel(logging.DEBUG) if argv_parsed.output_format is not None:", "is only 1 workspace deployment')) config_parser.add_argument('-c', '--create', action='store_true', default=False, dest='create_config',", "if len(config['local']['keys']) == 0: print('\\t(none)') else: print('\\t' + '\\n\\t'.join(config['local']['keys'])) if", "if argv_parsed.help_target_command == 'config': config_parser.print_help() elif argv_parsed.help_target_command == 'rules': rules_parser.print_help()", "argv_parsed.command == 'help': if hasattr(argv_parsed, 'help_target_command') and argv_parsed.help_target_command is not", "' are not valid; to get list of' ' files", "'remote': remote_config, } if 'local' in config: ref = config['local']['wdeployments']", "implied. 
# See the License for the specific language governing", "rm_wd from .api import HSAPIClient from .err import Error as", "if ac is None: print('cannot terminate without valid API client')", "width, height = None, None if argv_parsed.attach_camera_crop_config: crop = json.loads(argv_parsed.attach_camera_crop_config)", "found': print('not found: workspace deployment with id prefix {}' .format(res['id_prefix']))", "configuration; ' 'default: all wdeployments get full images')) attach_camera_parser.add_argument('-d', '--daemon',", "to execute; ' 'for example, ' 'copy-and-paste value shown in", "('dissolve this workspace deployment, making it' ' unavailable for any", "= None if argv_parsed.command == 'status': try: config = get_local_config()", "WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][index]) if 'container' in findings: try: subprocess.check_call([cprovider, 'rm', '-f', findings['container']['name']],", "list of' ' files with errors, try `--list`')) config_parser.add_argument('-l', '--list',", "get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc config['wdeployments'][index]['cprovider'] = selected_cprovider", "'addon-cmdsh': if ac is None: print('cannot register without initial local", "workspace deployment' ' (can be unique prefix)')) terminate_parser.add_argument('-f', '--force', action='store_true',", "ValueError as err: print('ERROR: {}'.format(err)) return 1 elif argv_parsed.command ==", "elif argv_parsed.add_init_inside is not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix)", "register_parser = subparsers.add_parser('register', description=register_commanddesc, help=register_commanddesc) register_parser.add_argument('--permit-more', action='store_false', default=True, dest='register_at_most_one', help=('permit", "hasattr(argv_parsed, 'help_target_command') and argv_parsed.help_target_command is not None: if argv_parsed.help_target_command ==", "(pid: {});' ' %(asctime)s ; %(message)s' 
.format(os.getpid()))) pkglogger.addHandler(loghandler) return ac.run_sync(config['wdeployments'][index]['id'])", "'attach-camera': attach_camera_parser.print_help() elif argv_parsed.help_target_command == 'stop-cameras': stop_cameras_parser.print_help() elif argv_parsed.help_target_command ==", "= None while ui_input not in ('y', 'yes'): print('Do you", "{} or {} does not exist or ' 'has the", "pkglogger.removeHandler(loghandler) if argv_parsed.become_daemon: if os.fork() != 0: return 0 os.close(0)", "if not os.path.exists(normalized_path): print('ERROR: given path does not exist') return", "instance, then' ' stop it without waiting'), dest='force_terminate') help_message_purge =", "config --create`)') return 1 config, index, rc = get_config_with_index(argv_parsed.id_prefix) if", "add-on cmdsh for your workspace deployments' addon_cmdsh_parser = subparsers.add_parser('addon-cmdsh', description=addon_cmdsh_commanddesc,", "local configuration is found, then create one') config_parser.add_argument('--add-terminate-prog', metavar='PATH', dest='add_terminate_prog',", "rules_parser.add_argument('--drop-all', action='store_true', default=False, dest='drop_all_rules', help=('remove all access rules; ' 'note", "invoking terminal (i.e., run as daemon)', dest='become_daemon') attach_camera_commanddesc = 'attach", "if output_format == 'json': print(json.dumps(findings)) else: # output_format == 'yaml'", "print('ERROR: {}'.format(err)) return 1 elif argv_parsed.command == 'addon-mistyproxy': if ac", "len(config['local']['keys']) == 0: print('\\t(none)') else: print('\\t' + '\\n\\t'.join(config['local']['keys'])) if 'err_keys'", "1 if cprovider == 'podman': cp_images = subprocess.run([cprovider, 'image', 'exists',", "dest='add_addon_cmdsh') addon_cmdsh_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on cmdsh', dest='rm_addon_cmdsh') addon_vnc_commanddesc =", "metavar='COMMAND', type=str, nargs='?') config_commanddesc = 'manage local and remote 
configuration'", "else: pkglogger.addHandler(logging.StreamHandler()) logfname = 'hardshare_client.{}.log'.format(config['wdeployments'][index]['id']) loghandler = logging.FileHandler(filename=logfname, mode='a', delay=True)", "help=('width and height of captured images; ' 'default depends on", "' %(asctime)s ; %(message)s' .format(os.getpid()))) pkglogger.addHandler(loghandler) if argv is None:", "0 if argv_parsed.verbose: pkglogger.setLevel(logging.DEBUG) if argv_parsed.output_format is not None: output_format", "configuration' config_parser = subparsers.add_parser('config', description=config_commanddesc, help=config_commanddesc) config_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None,", "mistyproxy for your workspace deployments' addon_mistyproxy_parser = subparsers.add_parser('addon-mistyproxy', description=addon_mistyproxy_commanddesc, help=addon_mistyproxy_commanddesc)", "loading configuration data.' ' does it exist? is it broken?')", "wdeployment['terminate']: print('\\tterminate:') for terminate_p in wdeployment['terminate']: print('\\t\\t{}'.format(terminate_p)) print('\\nfound keys:') if", "elif argv_parsed.command == 'attach-camera': config, indices, rc = get_config_with_index(argv_parsed.id_prefix) if", "1 if not argv_parsed.only_local_config: # Try to get remote config,", "'--help', dest='print_help', action='store_true', default=False, help='print this help message and exit')", "else: print('Use `hardshare addon-mistyproxy` with a switch.') print('To get a", "None: print('cannot terminate without valid API client') return 1 try:", "res['err'] == 'wrong authorization token': print('wrong API token. 
Did it", "argv_parsed.purge_supposed_instance: cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--purge not", "return 1 return 0 else: print('failed to detect local instance')", "dest='rm_terminate_prog', default=None, help=('remove program from list of commands to execute;", "deployment in local configuration.')) return config, None, 1 if isinstance(id_prefix,", "tok) else: print('Use `hardshare addon-vnc` with a switch.') print('To get", "(Try `hardshare status`)') return 1 return 0 elif argv_parsed.command ==", "in ('y', 'yes'): print(('Do you want to dissolve {}? This", "config['wdeployments'][index]['cargs'].remove(carg) modify_local(config) elif argv_parsed.add_init_inside is not None: config, index, rc", "want to dissolve {}? This action cannot be undone. '", "to get remote config, given possibly new local config try:", "os.close(2) try: camera_main(wdeployments, tok=tok, dev=argv_parsed.camera, width=width, height=height, crop=crop) except ConnectionError:", "not appear to be valid.') return 1 wdid = config['wdeployments'][index]['id']", "= '--device={D}:{D}'.format(D=argv_parsed.raw_device_path) config['wdeployments'][index]['cargs'].append(carg) modify_local(config) elif argv_parsed.remove_raw_device_path is not None: config,", "instance to finish' terminate_parser = subparsers.add_parser('stop-ad', description=terminate_commanddesc, help=terminate_commanddesc) terminate_parser.add_argument('id_prefix', metavar='ID',", "Unless required by applicable law or agreed to in writing,", "as daemon)', dest='become_daemon') attach_camera_commanddesc = 'attach camera stream to workspace", "return config, None, 1 else: if len(config['wdeployments']) > 1: print('ERROR:", "from . 
import __version__ as hardshare_pkg_version print(hardshare_pkg_version) return 0 elif", "the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "def get_config_with_index(id_prefix=None): try: config = get_local_config() except: print('error loading configuration", "= subparsers.add_parser('ad', description=advertise_commanddesc, help=advertise_commanddesc) advertise_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of", "'rules': if ac is None: print('no local configuration found. (try", "config_parser.add_argument('--local', action='store_true', default=False, dest='only_local_config', help='only show local configuration data') config_parser.add_argument('--include-dissolved',", "dest='all_cameras') addon_cmdsh_commanddesc = 'manage add-on cmdsh for your workspace deployments'", "return 1 return 0 elif argv_parsed.command == 'register': if ac", "the specific language governing permissions and # limitations under the", "list): if len(id_prefix) == 0: if len(config['wdeployments']) > 1: print('ERROR:", "argv_parsed.add_addon_vnc: add_vnc(wdeployment_id, tok) elif argv_parsed.rm_addon_vnc: rm_vnc(wdeployment_id, tok) else: print('Use `hardshare", "for err_key_path, err in config['local']['err_keys'].items(): print('\\t {}: {}'.format(err, err_key_path)) if", "def main(argv=None): pkglogger = logging.getLogger('hardshare') pkglogger.setLevel(logging.WARNING) loghandler = logging.handlers.WatchedFileHandler(filename='hardshare_client.log', mode='a',", "there is only 1 workspace deployment')) addon_vnc_parser.add_argument('--add', action='store_true', default=False, help='add", "appear to be valid.') return 1 ui_input = None while", "else: # output_format == 'yaml' print(yaml.dump(findings, default_flow_style=False)) elif argv_parsed.command ==", "vnc for your workspace deployments' addon_vnc_parser = subparsers.add_parser('addon-vnc', description=addon_vnc_commanddesc, help=addon_vnc_commanddesc)", 
"podman, proxy') config_parser.add_argument('--assign-image', metavar='IMG', type=str, dest='cprovider_img', default=None, help='assign image for", "cprovider: {}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside'] = [] modify_local(config) elif argv_parsed.cprovider", "if cprovider == 'proxy': print('--add-init-inside not supported for cprovider `proxy`')", "cprovider = config['wdeployments'][index]['cprovider'] if cprovider not in ['docker', 'podman', 'proxy']:", "local configuration.' ' (try `hardshare config --create`)') return 1 try:", "modify_local(config) elif argv_parsed.add_terminate_prog is not None: config, index, rc =", "return 1 else: print('Use `hardshare rules` with a switch. For", "access by anyone? [y/N] ', end='') ui_input = input().lower() if", "in config['local'] and len(config['local']['err_keys']) > 0: print('found possible keys with", "argv = sys.argv[1:] argparser = argparse.ArgumentParser(description=('Command-line interface' ' for the", "selected_cprovider \\in {docker, podman} if config['wdeployments'][index]['image'] is None: config['wdeployments'][index]['image'] =", "run as daemon)', dest='become_daemon') attach_camera_commanddesc = 'attach camera stream to", "os.fork() != 0: return 0 os.close(0) os.close(1) os.close(2) try: camera_main(wdeployments,", "err: print('{}'.format(err)) return 1 else: print('Use `hardshare rules` with a", "0: return rc cprovider = config['wdeployments'][index]['cprovider'] if cprovider not in", "config['wdeployments'][index]['image'] = 'rerobots/hs-generic' modify_local(config) elif argv_parsed.cprovider_img is not None: config,", "return 1 except ValueError as err: print('ERROR: {}'.format(err)) return 1", "to enable VNC via rerobots.net', dest='add_addon_vnc') addon_vnc_parser.add_argument('--rm', action='store_true', default=False, help='remove", "dissolve_parser = subparsers.add_parser('dissolve', description=dissolve_commanddesc, help=dissolve_commanddesc) 
dissolve_parser.add_argument('wdid', metavar='ID', nargs='?', default=None, help='id", "' 'if there is only 1 workspace deployment')) addon_mistyproxy_parser.add_argument('--add', action='store_true',", "1 else: print('Use `hardshare rules` with a switch. For example,", "ac = None if argv_parsed.command == 'status': try: config =", "('n', 'no', ''): return 1 try: ac.add_access_rule(wdid, to_user='*') except Exception", "if rc != 0: return rc wdeployment_id = config['wdeployments'][index]['id'] local_keys", "of the following: docker, podman, proxy') return 1 config, index,", "help=terminate_commanddesc) terminate_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of target workspace deployment'", "'json': print(json.dumps(findings)) else: # output_format == 'yaml' print(yaml.dump(findings, default_flow_style=False)) elif", "\\in {docker, podman} if config['wdeployments'][index]['image'] is None: config['wdeployments'][index]['image'] = 'rerobots/hs-generic'", "= get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc if 'ssh_key'", "as HSError from .addons import camera_main, stop_cameras from .addons import", "get a help message, enter\\n\\n hardshare help addon-vnc') return 1", "save=True) elif argv_parsed.command == 'config': if argv_parsed.list_config: try: config =", "an instance is active,' ' but there is not one", "print('--add-init-inside not supported for cprovider `proxy`') return 1 elif cprovider", "help='detach from invoking terminal (i.e., run as daemon)', dest='become_daemon') attach_camera_commanddesc", "register_parser.add_argument('--permit-more', action='store_false', default=True, dest='register_at_most_one', help=('permit registration of more than 1", "' terminated and attempt local clean-up; this' ' command is", "metavar='ID', nargs='?', default=None, help=('id of workspace deployment' ' (can be", "permissions)' rules_parser = subparsers.add_parser('rules', description=rules_commanddesc, 
help=rules_commanddesc) rules_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None,", "config, None, 1 else: if len(config['wdeployments']) > 1: print('ERROR: ambiguous", "argv_parsed.output_format is not None: output_format = argv_parsed.output_format.lower() if output_format not", "modify_local(config) elif argv_parsed.cprovider is not None: selected_cprovider = argv_parsed.cprovider.lower() if", "deployments' addon_cmdsh_parser = subparsers.add_parser('addon-cmdsh', description=addon_cmdsh_commanddesc, help=addon_cmdsh_commanddesc) addon_cmdsh_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None,", "deployment' ' (can be unique prefix)')) terminate_parser.add_argument('-f', '--force', action='store_true', default=False,", "allcam=argv_parsed.all_cameras) except ConnectionError: print('ERROR: failed to reach server. Are you", "selected_cprovider == 'proxy': config['wdeployments'][index]['image'] = None else: # selected_cprovider \\in", "(try `hardshare config -h`)') return 1 try: wdid = str(uuid.UUID(argv_parsed.wdid))", "advertise_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment to advertise'", "ac.add_access_rule(wdid, to_user='*') except Exception as err: print('{}'.format(err)) return 1 else:", "from .mgmt import get_local_config, add_key, add_ssh_path, list_local_keys from .mgmt import", "elif argv_parsed.create_config: get_local_config(create_if_empty=True) elif argv_parsed.declared_wdeployment_id is not None: assert ac", "1 workspace deployment defined.') return config, None, 1 index =", "NOT copy the key)') config_parser.add_argument('--add-raw-device', metavar='PATH', type=str, dest='raw_device_path', default=None, help='add", "rc cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--rm-init-inside not", "dest='purge_supposed_instance') argv_parsed = argparser.parse_args(argv) if argv_parsed.print_version or argv_parsed.command 
== 'version':", "permits it.', ] if output_format == 'json': print(json.dumps(res)) else: #", "status of local instances and daemon' status_parser = subparsers.add_parser('status', description=status_commanddesc,", "aiohttp.client_exceptions import ClientConnectorError as ConnectionError from .core import WorkspaceInstance from", "= subparsers.add_parser('stop-ad', description=terminate_commanddesc, help=terminate_commanddesc) terminate_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of", "by the hardshare client', dest='verbose') argparser.add_argument('--format', metavar='FORMAT', default=None, type=str, help=('special", "try `hardshare' ' terminate` without --purge.') terminate_parser.add_argument('--purge', action='store_true', default=False, help=help_message_purge,", "0 return config, index, 0 def main(argv=None): pkglogger = logging.getLogger('hardshare')", "config['wdeployments'][index]['image'] = None else: # selected_cprovider \\in {docker, podman} if", "help='add add-on mistyproxy to allow HTTP proxy to Misty robots',", "stop_cameras_parser = subparsers.add_parser('stop-cameras', description=stop_cameras_commanddesc, help=stop_cameras_commanddesc) stop_cameras_parser.add_argument('-a', '--all', action='store_true', default=False, help=('stop", "== 'addon-mistyproxy': addon_mistyproxy_parser.print_help() elif argv_parsed.help_target_command == 'ad': advertise_parser.print_help() elif argv_parsed.help_target_command", "active instance, then' ' stop it without waiting'), dest='force_terminate') help_message_purge", "if len(config['wdeployments']) > 1: print('ERROR: ambiguous command: more than 1", "to dissolve') status_commanddesc = 'get status of local instances and", "stop_cameras_parser.print_help() elif argv_parsed.help_target_command == 'addon-cmdsh': addon_cmdsh_parser.print_help() elif argv_parsed.help_target_command == 'addon-vnc':", "is None: argv = sys.argv[1:] argparser = 
argparse.ArgumentParser(description=('Command-line interface' '", "nargs='?', default=None, help='id of workspace deployment to dissolve') status_commanddesc =", "get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc cprovider = config['wdeployments'][index]['cprovider']", "elif argv_parsed.help_target_command == 'addon-vnc': addon_vnc_parser.print_help() elif argv_parsed.help_target_command == 'addon-mistyproxy': addon_mistyproxy_parser.print_help()", "help=('remove program from list of commands to execute; ' 'for", "elif argv_parsed.remove_raw_device_path is not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix)", "subprocess import sys import uuid import yaml from aiohttp.client_exceptions import", "for wdeployment in config['local']['wdeployments']: print('{}\\n\\turl: {}\\n\\towner: {}\\n\\tcprovider: {}\\n\\tcargs: {}'.format( wdeployment['id'],", "addon_mistyproxy_commanddesc = 'manage add-on mistyproxy for your workspace deployments' addon_mistyproxy_parser", "unavailable for any future use' ' (THIS CANNOT BE UNDONE)')", "None, 1 indices.append(index) index = indices elif id_prefix: index =", "== 'version': from . 
import __version__ as hardshare_pkg_version print(hardshare_pkg_version) return", "deployment to dissolve') status_commanddesc = 'get status of local instances", "one_or_none=False): findings.append(WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][m])) if output_format == 'json': print(json.dumps(findings)) else: # output_format", "uuid import yaml from aiohttp.client_exceptions import ClientConnectorError as ConnectionError from", "try: camera_main(wdeployments, tok=tok, dev=argv_parsed.camera, width=width, height=height, crop=crop) except ConnectionError: if", "''): return 1 try: res = ac.dissolve_registration(wdid) except: print('Error occurred", "!= 0: print('The given ID does not appear to be", "workspace deployment, making it' ' unavailable for any future use'", "account, whether or not started on this host'), dest='all_cameras') addon_cmdsh_commanddesc", "argv_parsed.help_target_command == 'stop-cameras': stop_cameras_parser.print_help() elif argv_parsed.help_target_command == 'addon-cmdsh': addon_cmdsh_parser.print_help() elif", "language governing permissions and # limitations under the License. \"\"\"Command-line", "you want to permit access by anyone? 
[y/N] ', end='')", "Did it expire?') else: print(res['err']) return 1 # Remove from", "owner)')) rules_parser.add_argument('--permit-all', action='store_true', default=False, dest='add_rule_permit_all', help='permit instantiations by anyone') register_commanddesc", "= { 'local': config, 'remote': None, } print('workspace deployments defined", "!= 0: return rc cprovider = config['wdeployments'][index]['cprovider'] if cprovider ==", "with open(local_keys[0], 'rt') as fp: tok = fp.read().strip() if argv_parsed.attach_camera_res:", "else: print('\\nregistered workspace deployments with this user account:') for wd", "registration: {}' .format(wd['origin'])) if wd['dissolved']: print('\\tdissolved: {}'.format(wd['dissolved'])) elif argv_parsed.prune_err_keys: _,", "ac is not None ac.declare_existing(argv_parsed.declared_wdeployment_id) ac.sync_config() elif argv_parsed.raw_device_path is not", "and remote configuration' config_parser = subparsers.add_parser('config', description=config_commanddesc, help=config_commanddesc) config_parser.add_argument('id_prefix', metavar='ID',", "HSError as err: print('Error: {}'.format(err)) return 1 except: print('Error occurred", "{}' .format(res['id_prefix'])) elif res['err'] == 'wrong authorization token': print('wrong API", "try: assert ac is not None remote_config = ac.get_remote_config(include_dissolved=argv_parsed.include_dissolved) except", "{}' .format(wd['origin'])) if wd['dissolved']: print('\\tdissolved: {}'.format(wd['dissolved'])) elif argv_parsed.prune_err_keys: _, errored_keys", "'--force', action='store_true', default=False, help=('if there is an active instance, then'", "== 'podman': cp_images = subprocess.run([cprovider, 'image', 'exists', argv_parsed.cprovider_img]) if cp_images.returncode", "by cprovider') return 1 elif cprovider == 'docker': cp_images =", "import os.path import subprocess import sys import uuid import yaml", "help=status_commanddesc) status_parser.add_argument('id_prefix', metavar='ID', nargs='?', 
default=None, help=('id of target workspace deployment'", "rules_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of target workspace deployment' '", "file does not exist') return 1 carg = '--device={D}:{D}'.format(D=argv_parsed.raw_device_path) config['wdeployments'][index]['cargs'].append(carg)", "index = [0] else: indices = [] for idp in", "default_flow_style=False)) elif argv_parsed.command == 'attach-camera': config, indices, rc = get_config_with_index(argv_parsed.id_prefix)", "'date_dissolved' in res: print('\\tdissolved: {}'.format(res['date_dissolved'])) elif argv_parsed.command == 'dissolve': if", "path to SSH key pair (does NOT copy the key)')", "target workspace deployment' ' (can be unique prefix); ' 'this", "this' ' command is a last resort. First, try `hardshare'", "is only 1 workspace deployment')) advertise_parser.add_argument('-d', '--daemon', action='store_true', default=False, help='detach", "default=None, help='IP address of the Misty robot', dest='targetaddr') addon_mistyproxy_parser.add_argument('--rm', action='store_true',", "config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--rm-init-inside not supported for cprovider", "except ConnectionError: if not argv_parsed.become_daemon: print('ERROR: failed to reach server.", "deployment')) addon_vnc_parser.add_argument('--add', action='store_true', default=False, help='add add-on vnc to enable VNC", "config['wdeployments'][index]['init_inside'].append(argv_parsed.add_init_inside) modify_local(config) elif argv_parsed.rm_init_inside: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if", "None remote_config = ac.get_remote_config(include_dissolved=argv_parsed.include_dissolved) except HSError as err: print('Error: {}'.format(err))", "try: if argv_parsed.add_addon_mistyproxy: if argv_parsed.targetaddr is None: print('--ip is required", "err: print('ERROR: {}'.format(err)) return 1 except ConnectionError: print('ERROR: failed to", 
"mark it remotely as' ' terminated and attempt local clean-up;", "return 1 try: res = ac.dissolve_registration(wdid) except: print('Error occurred while", "addon_cmdsh_commanddesc = 'manage add-on cmdsh for your workspace deployments' addon_cmdsh_parser", "if output_format not in ['yaml', 'json']: print('output format unrecognized: {}'.format(argv_parsed.output_format))", "else: findings = [] for m in find_wd(config, argv_parsed.id_prefix, one_or_none=False):", "= [ 'Access is denied unless a rule explicitly permits", "Did it expire?') else: print(res['err']) return 1 else: print('summary of", "config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--add-init-inside not supported for cprovider", "Are you connected to the Internet?') return 1 elif argv_parsed.command", "a help message, enter\\n\\n hardshare help addon-mistyproxy') return 1 except", "not in ['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside']", "argv_parsed.cprovider_img], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) if cp_images.returncode != 0: print('ERROR: given image", "the Misty robot', dest='targetaddr') addon_mistyproxy_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on mistyproxy',", "help='add new account key') config_parser.add_argument('--add-ssh-path', metavar='PATH', dest='new_ssh_path', help='add path to", "local configuration.')) return config, None, 1 if isinstance(id_prefix, list): if", "if argv_parsed.add_addon_cmdsh: add_cmdsh(wdeployment_id, tok) elif argv_parsed.rm_addon_cmdsh: rm_cmdsh(wdeployment_id, tok) else: print('Use", "' (try `hardshare config --create`)') return 1 try: print(ac.register_new(at_most_one=argv_parsed.register_at_most_one)) except", "{}'.format(wd['dissolved'])) elif argv_parsed.prune_err_keys: _, errored_keys = list_local_keys(collect_errors=True) for err_key_path, err", ".addons import camera_main, stop_cameras from .addons 
import add_cmdsh, rm_cmdsh, add_vnc,", "return config, None, 1 indices.append(index) index = indices elif id_prefix:", "print('output format unrecognized: {}'.format(argv_parsed.output_format)) return 1 else: output_format = None", "return 1 elif argv_parsed.command == 'check': if ac is None:", "You may obtain a copy of the License at #", "['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 if not os.path.exists(argv_parsed.raw_device_path):", "local key directory that' ' are not valid; to get", "action='store_true', default=False, help=('stop all attached cameras associated with this '", "all wdeployments get full images')) attach_camera_parser.add_argument('-d', '--daemon', action='store_true', default=False, help='detach", "argv_parsed.id_prefix, one_or_none=False): findings.append(WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][m])) if output_format == 'json': print(json.dumps(findings)) else: #", "!= 0: return 0 os.close(0) os.close(1) os.close(2) else: pkglogger.addHandler(logging.StreamHandler()) logfname", "wdeployment in config['local']['wdeployments']: print('{}\\n\\turl: {}\\n\\towner: {}\\n\\tcprovider: {}\\n\\tcargs: {}'.format( wdeployment['id'], wdeployment['url'],", "index = indices elif id_prefix: index = find_wd(config, id_prefix) if", "add_vnc(wdeployment_id, tok) elif argv_parsed.rm_addon_vnc: rm_vnc(wdeployment_id, tok) else: print('Use `hardshare addon-vnc`", "expire?') else: print(config['remote']['err']) return 1 if len(config['remote']['deployments']) == 0: print('\\nno", "if config['wdeployments'][index]['image'] is None: config['wdeployments'][index]['image'] = 'rerobots/hs-generic' modify_local(config) elif argv_parsed.cprovider_img", "does it exist?') return None, None, 1 if len(config['wdeployments']) ==", "fp: tok = fp.read().strip() try: if argv_parsed.add_addon_mistyproxy: if argv_parsed.targetaddr is", "is None: if len(config['wdeployments']) == 0: findings = 
[WorkspaceInstance.inspect_instance()] else:", "-h`)') return 1 try: res = ac.check_registration(argv_parsed.id_prefix) except: print('Error occurred", "== 'config': if argv_parsed.list_config: try: config = get_local_config(create_if_empty=argv_parsed.create_config, collect_errors=True) except:", "if rc != 0: print('The given ID does not appear", "elif argv_parsed.command == 'config': if argv_parsed.list_config: try: config = get_local_config(create_if_empty=argv_parsed.create_config,", "= ac.check_registration(argv_parsed.id_prefix) except: print('Error occurred while contacting remote server '", "'proxy': print('--purge not supported for cprovider `proxy`') return 1 elif", "contacting remote server.') if config['remote']['err'] == 'wrong authorization token': print('wrong", "no local configuration is found, then create one') config_parser.add_argument('--add-terminate-prog', metavar='PATH',", "commands for inside initialization') config_parser.add_argument('-p', '--prune', action='store_true', default=False, dest='prune_err_keys', help=('delete", "local instances and daemon' status_parser = subparsers.add_parser('status', description=status_commanddesc, help=status_commanddesc) status_parser.add_argument('id_prefix',", "type=str, dest='attach_camera_crop_config', default=None, help=('image crop configuration; ' 'default: all wdeployments", "help='id of workspace deployment to dissolve') status_commanddesc = 'get status", "return 1 else: print('summary of workspace deployment {}'.format(res['id'])) print('\\tcreated: {}'.format(res['date_created']))", "dest='list_config', help='list configuration') config_parser.add_argument('--local', action='store_true', default=False, dest='only_local_config', help='only show local", "add key') return 1 elif argv_parsed.new_ssh_path: try: add_ssh_path(argv_parsed.new_ssh_path) except: print('ERROR:", "workspace deployment defined.') return config, None, 1 index = 0", "all access rules; ' 'note that access is denied by", "new 
workspace deployment' register_parser = subparsers.add_parser('register', description=register_commanddesc, help=register_commanddesc) register_parser.add_argument('--permit-more', action='store_false',", "addon_vnc_parser.add_argument('--add', action='store_true', default=False, help='add add-on vnc to enable VNC via", "1 except: print('Error occurred while contacting rerobots servers') print('Try config", "nargs='?', default=None, help=('id of workspace deployment to advertise' ' (can", "action='store_true', default=False, help='print verbose messages about actions by the hardshare", "not None: selected_cprovider = argv_parsed.cprovider.lower() if selected_cprovider not in ['docker',", "open(local_keys[0], 'rt') as fp: tok = fp.read().strip() try: if argv_parsed.add_addon_cmdsh:", "0 elif argv_parsed.command is None or argv_parsed.command == 'help': if", "rules_parser.add_argument('-l', '--list', action='store_true', default=False, dest='list_rules', help='list all rules') rules_parser.add_argument('--permit-me', action='store_true',", "remote configuration' config_parser = subparsers.add_parser('config', description=config_commanddesc, help=config_commanddesc) config_parser.add_argument('id_prefix', metavar='ID', nargs='?',", "instances and daemon' status_parser = subparsers.add_parser('status', description=status_commanddesc, help=status_commanddesc) status_parser.add_argument('id_prefix', metavar='ID',", "add_vnc, rm_vnc, add_mistyproxy, rm_mistyproxy def get_config_with_index(id_prefix=None): try: config = get_local_config()", "'advertise availability, accept new instances' advertise_parser = subparsers.add_parser('ad', description=advertise_commanddesc, help=advertise_commanddesc)", "ui_input = None while ui_input not in ('y', 'yes'): print('Do", "config['wdeployments'][index]['id'] if argv_parsed.list_rules: try: res = ac.get_access_rules(wdid) except Exception as", "else: if len(config['wdeployments']) > 1: print('ERROR: ambiguous command: more than", 
"nargs='*', default=None, help=('id of workspace deployment on which to attach'", "argv_parsed.add_terminate_prog is not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if", "configuration already ' 'has wdeployment declared')) check_commanddesc = 'check registration", "= 0 return config, index, 0 def main(argv=None): pkglogger =", "action='store_true', default=False, help='add add-on mistyproxy to allow HTTP proxy to", "as fp: tok = fp.read().strip() try: stop_cameras(tok, allcam=argv_parsed.all_cameras) except ConnectionError:", "declare SSH key.\\n' 'Instances with connection type sshtun cannot launch.')", "1 workspace deployment')) addon_vnc_parser.add_argument('--add', action='store_true', default=False, help='add add-on vnc to", "ui_input not in ('y', 'yes'): print('Do you want to permit", "return 1 try: ac.add_access_rule(wdid, to_user='*') except Exception as err: print('{}'.format(err))", "unique prefix)')) advertise_commanddesc = 'advertise availability, accept new instances' advertise_parser", "argv_parsed.cprovider_img]) if cp_images.returncode != 0: print('ERROR: given image name is", "dest='new_ssh_path', help='add path to SSH key pair (does NOT copy", "'attach-camera': config, indices, rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0:", "return 1 if cprovider == 'podman': cp_images = subprocess.run([cprovider, 'image',", "type=str, nargs='?') config_commanddesc = 'manage local and remote configuration' config_parser", "help='print this help message and exit') argparser.add_argument('-V', '--version', action='store_true', default=False,", "check_parser.print_help() elif argv_parsed.help_target_command == 'dissolve': dissolve_parser.print_help() elif argv_parsed.help_target_command == 'status':", "for configuration changes' ' (can be unique prefix); ' 'this", "data of dissolved workspace deployments') config_parser.add_argument('--declare', metavar='ID', dest='declared_wdeployment_id', default=None, help=('declare", 
"json.loads(argv_parsed.attach_camera_crop_config) else: crop = None if argv_parsed.become_daemon: if os.fork() !=", "try: res = ac.dissolve_registration(wdid) except: print('Error occurred while contacting remote", "elif cprovider not in ['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return", "print('\\tcreated: {}'.format(res['date_created'])) print('\\torigin (address) of registration: {}'.format(res['origin'])) if 'date_dissolved' in", "help='add program to list of commands to execute') config_parser.add_argument('--rm-terminate-prog', metavar='PATH',", "main(argv=None): pkglogger = logging.getLogger('hardshare') pkglogger.setLevel(logging.WARNING) loghandler = logging.handlers.WatchedFileHandler(filename='hardshare_client.log', mode='a', delay=True)", "with open(local_keys[0], 'rt') as fp: tok = fp.read().strip() try: if", "metavar='PATH', dest='rm_terminate_prog', default=None, help=('remove program from list of commands to", "it without waiting'), dest='force_terminate') help_message_purge = ('if the server indicates", "{ 'local': config, 'remote': remote_config, } if 'local' in config:", "'err' in config['remote']: print('Error occurred while contacting remote server.') if", "return 0 else: print('failed to detect local instance') return 1", "addon_mistyproxy_parser = subparsers.add_parser('addon-mistyproxy', description=addon_mistyproxy_commanddesc, help=addon_mistyproxy_commanddesc) addon_mistyproxy_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id", "== 'stop-ad': config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc !=", "files with errors, try `--list`')) config_parser.add_argument('-l', '--list', action='store_true', default=False, dest='list_config',", "1 else: output_format = None try: ac = HSAPIClient() except:", "try: wdid = str(uuid.UUID(argv_parsed.wdid)) except: print('The given ID does not", "an active instance, then' ' stop it without waiting'), dest='force_terminate')", 
"License. # You may obtain a copy of the License", "== 'addon-vnc': if ac is None: print('cannot register without initial", "argv_parsed.attach_camera_res.split(',')] if width < 1 or height < 1: print('Width,", "return 1 elif argv_parsed.new_ssh_path: try: add_ssh_path(argv_parsed.new_ssh_path) except: print('ERROR: {} or", "help=('id of workspace deployment' ' (can be unique prefix); '", "configuration data. does it exist?') return 1 if argv_parsed.id_prefix is", "pkglogger.addHandler(loghandler) if argv is None: argv = sys.argv[1:] argparser =", "required ' 'if there is only 1 workspace deployment')) addon_mistyproxy_parser.add_argument('--add',", "== 'proxy': print('--purge not supported for cprovider `proxy`') return 1", "== 'stop-cameras': local_keys = list_local_keys() if len(local_keys) < 1: print('No", "dest='print_help', action='store_true', default=False, help='print this help message and exit') argparser.add_argument('-V',", "registered under' ' the same user account.)')) rules_commanddesc = 'modify", "== 0: print('\\t(none)') else: print('\\t' + '\\n\\t'.join(config['local']['keys'])) if 'err_keys' in", "cprovider: {}'.format(cprovider)) return 1 if not os.path.exists(argv_parsed.raw_device_path): print('ERROR: given device", "advertise_parser.print_help() elif argv_parsed.help_target_command == 'stop-ad': terminate_parser.print_help() else: argparser.print_help() else: argparser.print_help()", "if output_format == 'json': print(json.dumps(config)) elif output_format == 'yaml': print(yaml.dump(config,", "'modify access rules (also known as capabilities or permissions)' rules_parser", "switch.') print('To get a help message, enter\\n\\n hardshare help addon-mistyproxy')", "if wdeployment['terminate']: print('\\tterminate:') for terminate_p in wdeployment['terminate']: print('\\t\\t{}'.format(terminate_p)) print('\\nfound keys:')", "workspace deployment')) addon_cmdsh_parser.add_argument('--add', action='store_true', default=False, help='add add-on cmdsh 
to enable", "1 ui_input = None while ui_input not in ('y', 'yes'):", "print('\\nno registered workspace deployments with this user account') else: print('\\nregistered", "deployment' ' (can be unique prefix)')) advertise_commanddesc = 'advertise availability,", "metavar='ID', nargs='?', default=None, help='id of workspace deployment to dissolve') status_commanddesc", "crop=crop) except ConnectionError: if not argv_parsed.become_daemon: print('ERROR: failed to reach", "len(id_prefix) == 0: if len(config['wdeployments']) > 1: print('ERROR: ambiguous command:", "pkglogger.addHandler(logging.StreamHandler()) logfname = 'hardshare_client.{}.log'.format(config['wdeployments'][index]['id']) loghandler = logging.FileHandler(filename=logfname, mode='a', delay=True) loghandler.setLevel(logging.DEBUG)", "config, given possibly new local config try: assert ac is", "check' ' (can be unique prefix)')) dissolve_commanddesc = ('dissolve this", "rm_mistyproxy(wdeployment_id, tok) else: print('Use `hardshare addon-mistyproxy` with a switch.') print('To", "rc cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--add-raw-device not", "' (can be unique prefix)')) terminate_parser.add_argument('-f', '--force', action='store_true', default=False, help=('if", "print('Use `hardshare rules` with a switch. 
For example, `hardshare rules", "elif argv_parsed.help_target_command == 'check': check_parser.print_help() elif argv_parsed.help_target_command == 'dissolve': dissolve_parser.print_help()", "not None: assert ac is not None ac.declare_existing(argv_parsed.declared_wdeployment_id) ac.sync_config() elif", "os.path import subprocess import sys import uuid import yaml from", "making it' ' unavailable for any future use' ' (THIS", "print('{}'.format(err)) return 1 else: print('Use `hardshare rules` with a switch.", "rc != 0: return rc cprovider = config['wdeployments'][index]['cprovider'] if cprovider", "match precisely 1 workspace deployment') return config, None, 1 else:", "config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: return", "print(json.dumps(config)) elif output_format == 'yaml': print(yaml.dump(config, default_flow_style=False)) else: if 'local'", "dest='cprovider_img', default=None, help='assign image for cprovider to use (advanced option)')", "= str(uuid.UUID(argv_parsed.id_prefix)) except: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc", "ID does not appear to be valid.') return 1 ui_input", "subparsers.add_parser('check', description=check_commanddesc, help=check_commanddesc) check_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace", "precisely 1 workspace deployment') return config, None, 1 indices.append(index) index", "[] for m in find_wd(config, argv_parsed.id_prefix, one_or_none=False): findings.append(WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][m])) if output_format", "'proxy']: print('unknown cprovider: {}'.format(cprovider)) return 1 if cprovider == 'podman':", "this help message and exit') help_parser.add_argument('help_target_command', metavar='COMMAND', type=str, nargs='?') config_commanddesc", "except: ac = None if argv_parsed.command == 'status': try: config", "deployment to advertise' ' (can be unique prefix); ' 'this", 
"rm_vnc(wdeployment_id, tok) else: print('Use `hardshare addon-vnc` with a switch.') print('To", "elif argv_parsed.raw_device_path is not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix)", "sshtun cannot launch.') pkglogger.removeHandler(loghandler) if argv_parsed.become_daemon: if os.fork() != 0:", "required with --add') return 1 add_mistyproxy(wdeployment_id, tok, argv_parsed.targetaddr) elif argv_parsed.rm_addon_mistyproxy:", "ac.run_sync(config['wdeployments'][index]['id']) elif argv_parsed.command == 'stop-ad': config, index, rc = get_config_with_index(argv_parsed.id_prefix)", "it expire?') else: print(config['remote']['err']) return 1 if len(config['remote']['deployments']) == 0:", "subparsers.add_parser('rules', description=rules_commanddesc, help=rules_commanddesc) rules_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of target", "does not appear to be valid.') return 1 ui_input =", "else: ref = config['wdeployments'] for jj, wdeployment in enumerate(ref): ref[jj]['url']", "the Internet?') return 1 elif argv_parsed.command == 'addon-cmdsh': if ac", "1 if not os.path.exists(argv_parsed.raw_device_path): print('ERROR: given device file does not", ". 
import __version__ as hardshare_pkg_version print(hardshare_pkg_version) return 0 elif argv_parsed.command", "--assign-image not supported for cprovider `proxy`') return 1 config['wdeployments'][index]['image'] =", "return 1 config['wdeployments'][index]['image'] = argv_parsed.cprovider_img modify_local(config) elif argv_parsed.add_terminate_prog is not", "`hardshare config -l` here')) config_parser.add_argument('--add-key', metavar='FILE', dest='new_api_token', help='add new account", "None try: ac = HSAPIClient() except: ac = None if", "('y', 'yes'): print('Do you want to permit access by anyone?", "--add') return 1 add_mistyproxy(wdeployment_id, tok, argv_parsed.targetaddr) elif argv_parsed.rm_addon_mistyproxy: rm_mistyproxy(wdeployment_id, tok)", "').format(wdid), end='') ui_input = input().lower() if ui_input in ('n', 'no',", "argv_parsed.command == 'config': if argv_parsed.list_config: try: config = get_local_config(create_if_empty=argv_parsed.create_config, collect_errors=True)", "config_parser.add_argument('--declare', metavar='ID', dest='declared_wdeployment_id', default=None, help=('declare that workspace deployment is' '", "is not None: if argv_parsed.help_target_command == 'config': config_parser.print_help() elif argv_parsed.help_target_command", "(try `hardshare config -h`)') return 1 if argv_parsed.id_prefix is None:", "argv_parsed.declared_wdeployment_id is not None: assert ac is not None ac.declare_existing(argv_parsed.declared_wdeployment_id)", "registration of this workspace deployment' check_parser = subparsers.add_parser('check', description=check_commanddesc, help=check_commanddesc)", "via rerobots.net', dest='add_addon_vnc') addon_vnc_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on vnc', dest='rm_addon_vnc')", "deployment' register_parser = subparsers.add_parser('register', description=register_commanddesc, help=register_commanddesc) register_parser.add_argument('--permit-more', action='store_false', 
default=True, dest='register_at_most_one',", "wdeployment; ' 'default is to fail if local configuration already", "'docker': cp_images = subprocess.run([cprovider, 'image', 'inspect', argv_parsed.cprovider_img], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) if", "= 'get status of local instances and daemon' status_parser =", "action='store_true', default=False, dest='add_rule_permit_all', help='permit instantiations by anyone') register_commanddesc = 'register", "1 elif argv_parsed.command == 'ad': if ac is None: print('cannot", "SSH key.\\n' 'Instances with connection type sshtun cannot launch.') pkglogger.removeHandler(loghandler)", "elif argv_parsed.new_ssh_path: try: add_ssh_path(argv_parsed.new_ssh_path) except: print('ERROR: {} or {} does", "get_local_config(create_if_empty=True) elif argv_parsed.declared_wdeployment_id is not None: assert ac is not", "not declare SSH key.\\n' 'Instances with connection type sshtun cannot", "elif argv_parsed.rm_terminate_prog is not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix)", "workspace deployments with this user account') else: print('\\nregistered workspace deployments", "in argv_parsed.attach_camera_res.split(',')] if width < 1 or height < 1:", "proxy to Misty robots', dest='add_addon_mistyproxy') addon_mistyproxy_parser.add_argument('--ip', metavar='ADDRESS', default=None, help='IP address", "it expire?') else: print(res['err']) return 1 # Remove from local", "dest='verbose') argparser.add_argument('--format', metavar='FORMAT', default=None, type=str, help=('special output formatting (default is", "'\\n\\t'.join(config['local']['keys'])) if 'err_keys' in config['local'] and len(config['local']['err_keys']) > 0: print('found", "'rt') as fp: tok = fp.read().strip() try: if argv_parsed.add_addon_cmdsh: add_cmdsh(wdeployment_id,", "print('Error: {}'.format(err)) return 1 except: print('Error occurred while contacting rerobots", "API token. 
Did it expire?') else: print(res['err']) return 1 #", "name is not recognized by cprovider') return 1 elif cprovider", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "defined.') return config, None, 1 index = [0] else: indices", "help='list all rules') rules_parser.add_argument('--permit-me', action='store_true', default=False, dest='add_rule_permit_me', help='permit instantiations by", "for the specific language governing permissions and # limitations under", "1 try: wdid = str(uuid.UUID(argv_parsed.wdid)) except: print('The given ID does", "rules_commanddesc = 'modify access rules (also known as capabilities or", "robots', dest='add_addon_mistyproxy') addon_mistyproxy_parser.add_argument('--ip', metavar='ADDRESS', default=None, help='IP address of the Misty", "'local' in config: ref = config['local']['wdeployments'] else: ref = config['wdeployments']", "from .core import WorkspaceInstance from .mgmt import get_local_config, add_key, add_ssh_path,", "dest='force_terminate') help_message_purge = ('if the server indicates that an instance", "print('ERROR: {}'.format(err)) return 1 elif argv_parsed.command == 'addon-vnc': if ac", "return 1 except ConnectionError: print('ERROR: failed to reach server. 
Are", "add-on cmdsh to enable terminal access via WebSockets', dest='add_addon_cmdsh') addon_cmdsh_parser.add_argument('--rm',", "required by applicable law or agreed to in writing, software", "default=False, dest='drop_all_rules', help=('remove all access rules; ' 'note that access", "First, try `hardshare' ' terminate` without --purge.') terminate_parser.add_argument('--purge', action='store_true', default=False,", "subprocess.run([cprovider, 'image', 'inspect', argv_parsed.cprovider_img], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) if cp_images.returncode != 0:", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "import __version__ as hardshare_pkg_version print(hardshare_pkg_version) return 0 elif argv_parsed.command is", "'stop-ad': config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0:", "'proxy': print('--add-raw-device not supported for cprovider `proxy`') return 1 elif", "[config['wdeployments'][jj]['id'] for jj in indices] local_keys = list_local_keys() if len(local_keys)", "1 except ValueError as err: print('ERROR: {}'.format(err)) return 1 elif", "pkglogger.setLevel(logging.WARNING) loghandler = logging.handlers.WatchedFileHandler(filename='hardshare_client.log', mode='a', delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s) (pid:", "addon-vnc') return 1 except ValueError as err: print('ERROR: {}'.format(err)) return", "'Access is denied unless a rule explicitly permits it.', ]", "configuration found. 
(try `hardshare config -h`)') return 1 if argv_parsed.id_prefix", "argparse import json import logging import logging.handlers import os import", "in findings: try: subprocess.check_call([cprovider, 'rm', '-f', findings['container']['name']], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) except:", "required ' 'if there is only 1 workspace deployment')) rules_parser.add_argument('-l',", "execute; ' 'for example, ' 'copy-and-paste value shown in `hardshare", "agreed to in writing, software # distributed under the License", "only 1 workspace deployment')) rules_parser.add_argument('-l', '--list', action='store_true', default=False, dest='list_rules', help='list", "1 if isinstance(id_prefix, list): if len(id_prefix) == 0: if len(config['wdeployments'])", "= subparsers.add_parser('rules', description=rules_commanddesc, help=rules_commanddesc) rules_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of", "argv_parsed.command is None or argv_parsed.command == 'help': if hasattr(argv_parsed, 'help_target_command')", "if argv_parsed.list_rules: try: res = ac.get_access_rules(wdid) except Exception as err:", "distributed under the License is distributed on an \"AS IS\"", "config_parser.add_argument('--add-key', metavar='FILE', dest='new_api_token', help='add new account key') config_parser.add_argument('--add-ssh-path', metavar='PATH', dest='new_ssh_path',", "if cprovider == 'proxy': print('--add-raw-device not supported for cprovider `proxy`')", "if cprovider == 'proxy': print('--rm-init-inside not supported for cprovider `proxy`')", "0: return rc wdeployment_id = config['wdeployments'][index]['id'] local_keys = list_local_keys() if", "= subparsers.add_parser('help', help='print this help message and exit') help_parser.add_argument('help_target_command', metavar='COMMAND',", "is not None remote_config = ac.get_remote_config(include_dissolved=argv_parsed.include_dissolved) except HSError as err:", "to check' ' (can be unique prefix)')) 
dissolve_commanddesc = ('dissolve", "res: if res['err'] == 'wrong authorization token': print('wrong API token.", "argument is not required ' 'if there is only 1", "of workspace deployment to check' ' (can be unique prefix)'))", "dest='add_addon_vnc') addon_vnc_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on vnc', dest='rm_addon_vnc') addon_mistyproxy_commanddesc =", "message, enter\\n\\n hardshare help addon-cmdsh') return 1 except ValueError as", "argparser.add_argument('-V', '--version', action='store_true', default=False, help='print version of hardshare (this) package.',", "rerobots servers') print('Try config -l --local to only get local", "workspace deployment' register_parser = subparsers.add_parser('register', description=register_commanddesc, help=register_commanddesc) register_parser.add_argument('--permit-more', action='store_false', default=True,", "under' ' the same user account.)')) rules_commanddesc = 'modify access", "is only 1 workspace deployment')) rules_parser.add_argument('-l', '--list', action='store_true', default=False, dest='list_rules',", "is not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc", "there is only 1 workspace deployment')) rules_parser.add_argument('-l', '--list', action='store_true', default=False,", "rules_parser.print_help() elif argv_parsed.help_target_command == 'register': register_parser.print_help() elif argv_parsed.help_target_command == 'check':", "logging.FileHandler(filename=logfname, mode='a', delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s) (pid: {});' ' %(asctime)s", "get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc if argv_parsed.purge_supposed_instance: cprovider", "'config': config_parser.print_help() elif argv_parsed.help_target_command == 'rules': rules_parser.print_help() elif argv_parsed.help_target_command ==", "or permissions)' rules_parser = 
subparsers.add_parser('rules', description=rules_commanddesc, help=rules_commanddesc) rules_parser.add_argument('id_prefix', metavar='ID', nargs='?',", "of' ' files with errors, try `--list`')) config_parser.add_argument('-l', '--list', action='store_true',", "stop it without waiting'), dest='force_terminate') help_message_purge = ('if the server", "None: print('WARNING: local configuration does not declare SSH key.\\n' 'Instances", "or argv_parsed.command == 'version': from . import __version__ as hardshare_pkg_version", "except: print('failed to add key') return 1 elif argv_parsed.new_ssh_path: try:", "rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc wdeployments", "= get_local_config() except: print('error loading configuration data. does it exist?')", "copy the key)') config_parser.add_argument('--add-raw-device', metavar='PATH', type=str, dest='raw_device_path', default=None, help='add device", "argv_parsed.add_rule_permit_all: ui_input = None while ui_input not in ('y', 'yes'):", "if len(config['remote']['deployments']) == 0: print('\\nno registered workspace deployments with this", "1 indices.append(index) index = indices elif id_prefix: index = find_wd(config,", "wdid = str(uuid.UUID(argv_parsed.id_prefix)) except: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if", "Did it expire?') else: print(res['err']) return 1 res['comments'] = [", "' (can be unique prefix)')) advertise_commanddesc = 'advertise availability, accept", "Linux, 0 typically implies /dev/video0; ' 'if you only have", "hardshare help addon-mistyproxy') return 1 except ValueError as err: print('ERROR:", "1 try: print(ac.register_new(at_most_one=argv_parsed.register_at_most_one)) except HSError as err: print('ERROR: {}'.format(err)) return", "config['wdeployments'][index]['terminate'].append(normalized_path) modify_local(config) elif argv_parsed.rm_terminate_prog is not None: config, index, rc", "workspace deployment to advertise' ' (can be unique 
prefix); '", "the License. \"\"\"Command-line interface \"\"\" import argparse import json import", "description=status_commanddesc, help=status_commanddesc) status_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of target workspace", "as err: print('{}'.format(err)) return 1 else: print('Use `hardshare rules` with", "if selected_cprovider not in ['docker', 'podman', 'proxy']: print('ERROR: cprovider must", "list of commands for inside initialization') config_parser.add_argument('-p', '--prune', action='store_true', default=False,", "as err: print('Error: {}'.format(err)) return 1 except: print('Error occurred while", "you (the owner)')) rules_parser.add_argument('--permit-all', action='store_true', default=False, dest='add_rule_permit_all', help='permit instantiations by", "description=register_commanddesc, help=register_commanddesc) register_parser.add_argument('--permit-more', action='store_false', default=True, dest='register_at_most_one', help=('permit registration of more", "present in container') config_parser.add_argument('--cprovider', metavar='CPROVIDER', type=str, dest='cprovider', default=None, help='select a", "ac = HSAPIClient() except: ac = None if argv_parsed.command ==", "\"\"\" import argparse import json import logging import logging.handlers import", "else: print('summary of workspace deployment {}'.format(res['id'])) print('\\tcreated: {}'.format(res['date_created'])) print('\\torigin (address)", "!= 0: print('ERROR: given image name is not recognized by", "metavar='PATH', type=str, dest='raw_device_path', default=None, help='add device file to present in", "print(ac.register_new(at_most_one=argv_parsed.register_at_most_one)) except HSError as err: print('ERROR: {}'.format(err)) return 1 except", "in id_prefix: index = find_wd(config, idp) if index is None:", "attach_camera_commanddesc = 'attach camera stream to workspace deployments' attach_camera_parser =", "exist?') return None, None, 1 if 
len(config['wdeployments']) == 0: print(('ERROR:", "dest='print_version') argparser.add_argument('-v', '--verbose', action='store_true', default=False, help='print verbose messages about actions", "JSON'), dest='output_format') subparsers = argparser.add_subparsers(dest='command') subparsers.add_parser('version', help='print version number and", "' the same user account.)')) rules_commanddesc = 'modify access rules", "a help message, enter\\n\\n hardshare help config') return 1 return", "while contacting rerobots servers') print('Try config -l --local to only", "enter\\n\\n hardshare help addon-mistyproxy') return 1 except ValueError as err:", "0: return rc cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy':", "deployment')) attach_camera_parser.add_argument('--width-height', metavar='W,H', type=str, dest='attach_camera_res', default=None, help=('width and height of", "action='store_true', default=False, help='detach from invoking terminal (i.e., run as daemon)',", "else: print(res['err']) return 1 # Remove from local configuration, if", "print('To get a help message, enter\\n\\n hardshare help addon-mistyproxy') return", "wdid = str(uuid.UUID(argv_parsed.wdid)) except: print('The given ID does not appear", "config['remote']: if 'err' in config['remote']: print('Error occurred while contacting remote", "in ['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 if not", "future use' ' (THIS CANNOT BE UNDONE)') dissolve_parser = subparsers.add_parser('dissolve',", "print('\\tdissolved: {}'.format(res['date_dissolved'])) elif argv_parsed.command == 'dissolve': if ac is None:", "cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--rm-init-inside not supported", "import subprocess import sys import uuid import yaml from aiohttp.client_exceptions", "device previously marked for inclusion in container') config_parser.add_argument('--add-init-inside', metavar='CMD', type=str,", 
"cprovider not in ['docker', 'podman', 'proxy']: print('unknown cprovider: {}'.format(cprovider)) return", "not supported for cprovider `proxy`') return 1 config['wdeployments'][index]['image'] = argv_parsed.cprovider_img", "message, enter\\n\\n hardshare help config') return 1 return 0 if", "findings = [] for wd in config['wdeployments']: findings.append(WorkspaceInstance.inspect_instance(wdeployment=wd)) else: findings", "argv_parsed.help_target_command == 'config': config_parser.print_help() elif argv_parsed.help_target_command == 'rules': rules_parser.print_help() elif", "try: wdid = str(uuid.UUID(argv_parsed.id_prefix)) except: config, index, rc = get_config_with_index(argv_parsed.id_prefix)", "of commands to execute') config_parser.add_argument('--rm-terminate-prog', metavar='PATH', dest='rm_terminate_prog', default=None, help=('remove program", "tok) else: print('Use `hardshare addon-cmdsh` with a switch.') print('To get", ".err import Error as HSError from .addons import camera_main, stop_cameras", "findings = WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][index]) if 'container' in findings: try: subprocess.check_call([cprovider, 'rm',", "available. Check: `hardshare config -l`') return 1 with open(local_keys[0], 'rt')", "None: if argv_parsed.help_target_command == 'config': config_parser.print_help() elif argv_parsed.help_target_command == 'rules':", "print(json.dumps(findings)) else: # output_format == 'yaml' print(yaml.dump(findings, default_flow_style=False)) elif argv_parsed.command", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "return 1 else: output_format = None try: ac = HSAPIClient()", "HTTP proxy to Misty robots', dest='add_addon_mistyproxy') addon_mistyproxy_parser.add_argument('--ip', metavar='ADDRESS', default=None, help='IP", "the License is distributed on an \"AS IS\" BASIS, #", "config_parser.add_argument('--include-dissolved', action='store_true', default=False, dest='include_dissolved', help='include configuration data of dissolved workspace", "hosted here. (this only works if it' ' has been", "print('unknown cprovider: {}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside'].append(argv_parsed.add_init_inside) modify_local(config) elif argv_parsed.rm_init_inside: config,", "('if the server indicates that an instance is active,' '", "dest='cprovider', default=None, help='select a container provider: docker, podman, proxy') config_parser.add_argument('--assign-image',", "== 'check': check_parser.print_help() elif argv_parsed.help_target_command == 'dissolve': dissolve_parser.print_help() elif argv_parsed.help_target_command", "except Exception as err: print('{}'.format(err)) return 1 if 'err' in", "' command is a last resort. 
First, try `hardshare' '", "modify_local(config) elif argv_parsed.rm_terminate_prog is not None: config, index, rc =", "Try to get remote config, given possibly new local config", "default=False, help='print this help message and exit') argparser.add_argument('-V', '--version', action='store_true',", "action='store_true', default=False, dest='add_rule_permit_me', help='permit instantiations by you (the owner)') rules_parser.add_argument('--drop-all',", "'ad': if ac is None: print('cannot register without initial local", "help=('if there is an active instance, then' ' stop it", "argv_parsed.command == 'status': try: config = get_local_config() except: print('error loading", "metavar='PATH', dest='new_ssh_path', help='add path to SSH key pair (does NOT", "None: config['wdeployments'][index]['image'] = 'rerobots/hs-generic' modify_local(config) elif argv_parsed.cprovider_img is not None:", "help=('id of workspace deployment on which to attach' ' (can", "associated with this ' 'user account, whether or not started", "= sys.argv[1:] argparser = argparse.ArgumentParser(description=('Command-line interface' ' for the hardshare", "default=False, dest='include_dissolved', help='include configuration data of dissolved workspace deployments') config_parser.add_argument('--declare',", "argv_parsed.become_daemon: print('ERROR: failed to reach server. Are you connected to", "errors:') for err_key_path, err in config['local']['err_keys'].items(): print('\\t {}: {}'.format(err, err_key_path))", "token. 
Did it expire?') else: print(res['err']) return 1 else: print('summary", "== 'proxy': print('--rm-init-inside not supported for cprovider `proxy`') return 1", "docker, podman, proxy') config_parser.add_argument('--assign-image', metavar='IMG', type=str, dest='cprovider_img', default=None, help='assign image", ".core import WorkspaceInstance from .mgmt import get_local_config, add_key, add_ssh_path, list_local_keys", "= None try: ac = HSAPIClient() except: ac = None", "'not found': print('not found: workspace deployment with id prefix {}'", "1 elif argv_parsed.command == 'addon-cmdsh': if ac is None: print('cannot", "' 'has wdeployment declared')) check_commanddesc = 'check registration of this", "elif argv_parsed.command is None or argv_parsed.command == 'help': if hasattr(argv_parsed,", "configuration data.' ' does it exist? is it broken?') return", "law or agreed to in writing, software # distributed under", "1 except ConnectionError: print('ERROR: failed to reach server. Are you", "is to fail if local configuration already ' 'has wdeployment", "verbose messages about actions by the hardshare client', dest='verbose') argparser.add_argument('--format',", "' 'if there is only 1 workspace deployment')) addon_vnc_parser.add_argument('--add', action='store_true',", "== 'attach-camera': config, indices, rc = get_config_with_index(argv_parsed.id_prefix) if rc !=", "argv_parsed.id_prefix is None: if len(config['wdeployments']) == 0: findings = [WorkspaceInstance.inspect_instance()]", "{}'.format(res['origin'])) if 'date_dissolved' in res: print('\\tdissolved: {}'.format(res['date_dissolved'])) elif argv_parsed.command ==", "not recognized by cprovider') return 1 else: # cprovider ==", "'register': if ac is None: print('cannot register without initial local", "' 'if there is only 1 workspace deployment')) attach_camera_parser.add_argument('--width-height', metavar='W,H',", "local configuration found. 
(try `hardshare config -h`)') return 1 try:", "or it is otherwise in a' ' non-recoverable state, then", "return 0 os.close(0) os.close(1) os.close(2) else: pkglogger.addHandler(logging.StreamHandler()) logfname = 'hardshare_client.{}.log'.format(config['wdeployments'][index]['id'])", "rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc config['wdeployments'][index]['cprovider']", "elif argv_parsed.command == 'register': if ac is None: print('cannot register", "to the Internet?') return 1 elif argv_parsed.command == 'rules': if", "wdeployment['owner'], wdeployment['cprovider'], wdeployment['cargs'], )) if wdeployment['cprovider'] in ['docker', 'podman']: print('\\timg:", "= 'manage local and remote configuration' config_parser = subparsers.add_parser('config', description=config_commanddesc,", "here')) config_parser.add_argument('--add-key', metavar='FILE', dest='new_api_token', help='add new account key') config_parser.add_argument('--add-ssh-path', metavar='PATH',", "1 return 0 elif argv_parsed.command == 'register': if ac is", "may obtain a copy of the License at # #", "deployment')) addon_mistyproxy_parser.add_argument('--add', action='store_true', default=False, help='add add-on mistyproxy to allow HTTP", "terminate_parser.print_help() else: argparser.print_help() else: argparser.print_help() return 0 if argv_parsed.verbose: pkglogger.setLevel(logging.DEBUG)", "= 'manage add-on mistyproxy for your workspace deployments' addon_mistyproxy_parser =", "workspace deployment is' ' hosted here. 
(this only works if", "help=('delete files in local key directory that' ' are not", "of workspace deployment {}'.format(res['id'])) print('\\tcreated: {}'.format(res['date_created'])) print('\\torigin (address) of registration:", "return config, None, 1 index = 0 return config, index,", "to workspace deployments' attach_camera_parser = subparsers.add_parser('attach-camera', description=attach_camera_commanddesc, help=attach_camera_commanddesc) attach_camera_parser.add_argument('camera', default=0,", "a help message, enter\\n\\n hardshare help addon-cmdsh') return 1 except", "'rt') as fp: tok = fp.read().strip() try: if argv_parsed.add_addon_vnc: add_vnc(wdeployment_id,", "access rules; ' 'note that access is denied by default,", "if wdeployment['cprovider'] in ['docker', 'podman']: print('\\timg: {}'.format(wdeployment['image'])) if wdeployment['terminate']: print('\\tterminate:')", "help='print this help message and exit') help_parser.add_argument('help_target_command', metavar='COMMAND', type=str, nargs='?')", "default=False, dest='create_config', help='if no local configuration is found, then create", "print('not found: workspace deployment with id prefix {}' .format(res['id_prefix'])) elif", "config['wdeployments'][index]['image'] is None: config['wdeployments'][index]['image'] = 'rerobots/hs-generic' modify_local(config) elif argv_parsed.cprovider_img is", "1 workspace deployment') return config, None, 1 indices.append(index) index =", "print(hardshare_pkg_version) return 0 elif argv_parsed.command is None or argv_parsed.command ==", "may not use this file except in compliance with the", "or height < 1: print('Width, height must be positive') return", "wd in config['remote']['deployments']: print('{}'.format(wd['id'])) print('\\tcreated: {}'.format(wd['date_created'])) if wd['desc'] is not", "return rc if argv_parsed.purge_supposed_instance: cprovider = config['wdeployments'][index]['cprovider'] if cprovider ==", "addon_mistyproxy_parser.add_argument('--add', 
action='store_true', default=False, help='add add-on mistyproxy to allow HTTP proxy", "pkglogger = logging.getLogger('hardshare') pkglogger.setLevel(logging.WARNING) loghandler = logging.handlers.WatchedFileHandler(filename='hardshare_client.log', mode='a', delay=True) loghandler.setLevel(logging.DEBUG)", "print('no local configuration found. (try `hardshare config -h`)') return 1", "'options: YAML , JSON'), dest='output_format') subparsers = argparser.add_subparsers(dest='command') subparsers.add_parser('version', help='print", "cprovider: {}'.format(cprovider)) return 1 findings = WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][index]) if 'container' in", "None: output_format = argv_parsed.output_format.lower() if output_format not in ['yaml', 'json']:", "this file except in compliance with the License. # You", ".format(res['id_prefix'])) elif res['err'] == 'wrong authorization token': print('wrong API token.", "dest='rm_addon_vnc') addon_mistyproxy_commanddesc = 'manage add-on mistyproxy for your workspace deployments'", "argv_parsed.add_rule_permit_me: try: if argv_parsed.drop_all_rules: ac.drop_access_rules(wdid) elif argv_parsed.add_rule_permit_me: ac.add_access_rule(wdid) except Exception", "0: return 0 os.close(0) os.close(1) os.close(2) else: pkglogger.addHandler(logging.StreamHandler()) logfname =", "print('unknown cprovider: {}'.format(cprovider)) return 1 findings = WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][index]) if 'container'", "instantiations by you (the owner)') rules_parser.add_argument('--drop-all', action='store_true', default=False, dest='drop_all_rules', help=('remove", "return 1 elif cprovider not in ['docker', 'podman']: print('unknown cprovider:", "key)') config_parser.add_argument('--add-raw-device', metavar='PATH', type=str, dest='raw_device_path', default=None, help='add device file to", "' files with errors, try `--list`')) config_parser.add_argument('-l', '--list', action='store_true', 
default=False,", "stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) if cp_images.returncode != 0: print('ERROR: given image name", "add-on cmdsh', dest='rm_addon_cmdsh') addon_vnc_commanddesc = 'manage add-on vnc for your", "about actions by the hardshare client', dest='verbose') argparser.add_argument('--format', metavar='FORMAT', default=None,", "{});' ' %(asctime)s ; %(message)s' .format(os.getpid()))) pkglogger.addHandler(loghandler) if argv is", "add_mistyproxy, rm_mistyproxy def get_config_with_index(id_prefix=None): try: config = get_local_config() except: print('error", "config = get_local_config(create_if_empty=argv_parsed.create_config, collect_errors=True) except: print('error loading configuration data.' '", "supporting drivers')) attach_camera_parser.add_argument('--crop', metavar='CROPCONFIG', type=str, dest='attach_camera_crop_config', default=None, help=('image crop configuration;", "argv_parsed.drop_all_rules: ac.drop_access_rules(wdid) elif argv_parsed.add_rule_permit_me: ac.add_access_rule(wdid) except Exception as err: print('{}'.format(err))", "(try `hardshare config -h`)') return 1 try: res = ac.check_registration(argv_parsed.id_prefix)", "'err' in res: if res['err'] == 'not found': print('not found:", "default=False, dest='list_config', help='list configuration') config_parser.add_argument('--local', action='store_true', default=False, dest='only_local_config', help='only show", "# # Licensed under the Apache License, Version 2.0 (the", "None: argv = sys.argv[1:] argparser = argparse.ArgumentParser(description=('Command-line interface' ' for", "rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc cprovider", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "metavar='PATH', type=str, dest='remove_raw_device_path', default=None, help='remove device previously marked for inclusion", "'proxy']: print('ERROR: cprovider must be one of the following: docker,", "cprovider == 'podman': cp_images = 
subprocess.run([cprovider, 'image', 'exists', argv_parsed.cprovider_img]) if", "1 carg = '--device={D}:{D}'.format(D=argv_parsed.raw_device_path) config['wdeployments'][index]['cargs'].append(carg) modify_local(config) elif argv_parsed.remove_raw_device_path is not", "rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc carg", "rules_parser = subparsers.add_parser('rules', description=rules_commanddesc, help=rules_commanddesc) rules_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id", "if 'err' in res: if res['err'] == 'not found': print('not", "of workspace deployment to advertise' ' (can be unique prefix);", "{}\\n\\towner: {}\\n\\tcprovider: {}\\n\\tcargs: {}'.format( wdeployment['id'], wdeployment['url'], wdeployment['owner'], wdeployment['cprovider'], wdeployment['cargs'], ))", "help addon-vnc') return 1 except ValueError as err: print('ERROR: {}'.format(err))", "m in find_wd(config, argv_parsed.id_prefix, one_or_none=False): findings.append(WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][m])) if output_format == 'json':", "workspace deployment on which to attach' ' (can be unique", "workspace deployments' addon_mistyproxy_parser = subparsers.add_parser('addon-mistyproxy', description=addon_mistyproxy_commanddesc, help=addon_mistyproxy_commanddesc) addon_mistyproxy_parser.add_argument('id_prefix', metavar='ID', nargs='?',", "get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc if 'ssh_key' not", "does it exist? 
is it broken?') return 1 if not", "addon_vnc_parser = subparsers.add_parser('addon-vnc', description=addon_vnc_commanddesc, help=addon_vnc_commanddesc) addon_vnc_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id", "if argv_parsed.attach_camera_crop_config: crop = json.loads(argv_parsed.attach_camera_crop_config) else: crop = None if", "workspace deployments' addon_cmdsh_parser = subparsers.add_parser('addon-cmdsh', description=addon_cmdsh_commanddesc, help=addon_cmdsh_commanddesc) addon_cmdsh_parser.add_argument('id_prefix', metavar='ID', nargs='?',", "1 config['wdeployments'][index]['terminate'].append(normalized_path) modify_local(config) elif argv_parsed.rm_terminate_prog is not None: config, index,", "{}'.format(ac.base_uri)) return 1 if 'err' in res: if res['err'] ==", "if 'local' not in config: config = { 'local': config,", "rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: print('The given ID", "argparser.add_argument('--format', metavar='FORMAT', default=None, type=str, help=('special output formatting (default is no", "get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc wdeployment_id = config['wdeployments'][index]['id']", "rc if 'ssh_key' not in config or config['ssh_key'] is None:", "implies /dev/video0; ' 'if you only have one camera, then", "command is a last resort. First, try `hardshare' ' terminate`", "nargs='?', default=None, help=('id of workspace deployment' ' (can be unique", "1 elif argv_parsed.command == 'stop-cameras': local_keys = list_local_keys() if len(local_keys)", "end='') ui_input = input().lower() if ui_input in ('n', 'no', ''):", "{}'.format(res['id'])) print('\\tcreated: {}'.format(res['date_created'])) print('\\torigin (address) of registration: {}'.format(res['origin'])) if 'date_dissolved'", "if ac is None: print('no local configuration found. 
(try `hardshare", "elif argv_parsed.help_target_command == 'ad': advertise_parser.print_help() elif argv_parsed.help_target_command == 'stop-ad': terminate_parser.print_help()", "] if output_format == 'json': print(json.dumps(res)) else: # output_format ==", "help=help_message_purge, dest='purge_supposed_instance') argv_parsed = argparser.parse_args(argv) if argv_parsed.print_version or argv_parsed.command ==", "pkglogger.setLevel(logging.DEBUG) if argv_parsed.output_format is not None: output_format = argv_parsed.output_format.lower() if", "be valid.') return 1 ui_input = None while ui_input not", "not None ac.declare_existing(argv_parsed.declared_wdeployment_id) ac.sync_config() elif argv_parsed.raw_device_path is not None: config,", "0 def main(argv=None): pkglogger = logging.getLogger('hardshare') pkglogger.setLevel(logging.WARNING) loghandler = logging.handlers.WatchedFileHandler(filename='hardshare_client.log',", "attach-camera' stop_cameras_parser = subparsers.add_parser('stop-cameras', description=stop_cameras_commanddesc, help=stop_cameras_commanddesc) stop_cameras_parser.add_argument('-a', '--all', action='store_true', default=False,", "configuration:') if len(config['local']['wdeployments']) == 0: print('\\t(none)') else: for wdeployment in", "' has been previously registered under' ' the same user", "depends on the supporting drivers')) attach_camera_parser.add_argument('--crop', metavar='CROPCONFIG', type=str, dest='attach_camera_crop_config', default=None,", "print('unknown cprovider: {}'.format(cprovider)) return 1 if not os.path.exists(argv_parsed.raw_device_path): print('ERROR: given", "HSAPIClient() except: ac = None if argv_parsed.command == 'status': try:", "container') config_parser.add_argument('--rm-init-inside', action='store_true', default=False, dest='rm_init_inside', help='remove (empty) list of commands", "or implied. 
# See the License for the specific language", "0: return rc wdeployments = [config['wdeployments'][jj]['id'] for jj in indices]", "configuration, if present rm_wd(get_local_config(), wdid, save=True) elif argv_parsed.command == 'config':", "in config: config = { 'local': config, 'remote': None, }", "== 'help': if hasattr(argv_parsed, 'help_target_command') and argv_parsed.help_target_command is not None:", "accept new instances' advertise_parser = subparsers.add_parser('ad', description=advertise_commanddesc, help=advertise_commanddesc) advertise_parser.add_argument('id_prefix', metavar='ID',", "(default is no special formatting); ' 'options: YAML , JSON'),", "config['local']['err_keys'].items(): print('\\t {}: {}'.format(err, err_key_path)) if config['remote']: if 'err' in", "1 else: width, height = None, None if argv_parsed.attach_camera_crop_config: crop", "help='print verbose messages about actions by the hardshare client', dest='verbose')", "workspace deployment' ' (can be unique prefix); ' 'this argument", "print('summary of workspace deployment {}'.format(res['id'])) print('\\tcreated: {}'.format(res['date_created'])) print('\\torigin (address) of", "while ui_input not in ('y', 'yes'): print(('Do you want to", "be one of the following: docker, podman, proxy') return 1", "dissolve_commanddesc = ('dissolve this workspace deployment, making it' ' unavailable", "return rc cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--rm-init-inside", "addon_vnc_parser.print_help() elif argv_parsed.help_target_command == 'addon-mistyproxy': addon_mistyproxy_parser.print_help() elif argv_parsed.help_target_command == 'ad':", "one') config_parser.add_argument('--add-terminate-prog', metavar='PATH', dest='add_terminate_prog', default=None, help='add program to list of", "1 else: print('summary of workspace deployment {}'.format(res['id'])) print('\\tcreated: {}'.format(res['date_created'])) print('\\torigin", "config, None, 1 index = 0 
return config, index, 0", "remote server.') if config['remote']['err'] == 'wrong authorization token': print('wrong API", "open(local_keys[0], 'rt') as fp: tok = fp.read().strip() try: if argv_parsed.add_addon_vnc:", "from .addons import camera_main, stop_cameras from .addons import add_cmdsh, rm_cmdsh,", "else: indices = [] for idp in id_prefix: index =", "given image name is not recognized by cprovider') return 1", "(empty) list of commands for inside initialization') config_parser.add_argument('-p', '--prune', action='store_true',", "terminal (i.e., run as daemon)', dest='become_daemon') stop_cameras_commanddesc = 'stop camera", "dest='declared_wdeployment_id', default=None, help=('declare that workspace deployment is' ' hosted here.", "print('\\tdissolved: {}'.format(wd['dissolved'])) elif argv_parsed.prune_err_keys: _, errored_keys = list_local_keys(collect_errors=True) for err_key_path,", "help='list configuration') config_parser.add_argument('--local', action='store_true', default=False, dest='only_local_config', help='only show local configuration", "== 'attach-camera': attach_camera_parser.print_help() elif argv_parsed.help_target_command == 'stop-cameras': stop_cameras_parser.print_help() elif argv_parsed.help_target_command", "action='store_true', default=False, dest='create_config', help='if no local configuration is found, then", "[] modify_local(config) elif argv_parsed.cprovider is not None: selected_cprovider = argv_parsed.cprovider.lower()", "recognized by cprovider') return 1 else: # cprovider == 'proxy'", "config: config = { 'local': config, 'remote': None, } print('workspace", "os.close(0) os.close(1) os.close(2) try: camera_main(wdeployments, tok=tok, dev=argv_parsed.camera, width=width, height=height, crop=crop)", "`hardshare addon-mistyproxy` with a switch.') print('To get a help message,", "== 'proxy': config['wdeployments'][index]['image'] = None else: # selected_cprovider \\in {docker,", "ac is None: print('no local configuration found. 
(try `hardshare config", "inclusion in container') config_parser.add_argument('--add-init-inside', metavar='CMD', type=str, dest='add_init_inside', default=None, help='add command", "' (can be unique prefix)')) dissolve_commanddesc = ('dissolve this workspace", "account key') config_parser.add_argument('--add-ssh-path', metavar='PATH', dest='new_ssh_path', help='add path to SSH key", "in config or config['ssh_key'] is None: print('WARNING: local configuration does", "supported for cprovider `proxy`') return 1 elif cprovider not in", "0: print('\\t(none)') else: print('\\t' + '\\n\\t'.join(config['local']['keys'])) if 'err_keys' in config['local']", "cprovider == 'proxy' print('ERROR: --assign-image not supported for cprovider `proxy`')", "'yes'): print(('Do you want to dissolve {}? This action cannot", "SSH key pair (does NOT copy the key)') config_parser.add_argument('--add-raw-device', metavar='PATH',", "1 workspace deployment') return config, None, 1 else: if len(config['wdeployments'])", "status_commanddesc = 'get status of local instances and daemon' status_parser", "'check': check_parser.print_help() elif argv_parsed.help_target_command == 'dissolve': dissolve_parser.print_help() elif argv_parsed.help_target_command ==", "configuration data of dissolved workspace deployments') config_parser.add_argument('--declare', metavar='ID', dest='declared_wdeployment_id', default=None,", "argparser.add_argument('-h', '--help', dest='print_help', action='store_true', default=False, help='print this help message and", "of commands to execute; ' 'for example, ' 'copy-and-paste value", "than 1 workspace deployment defined.') return config, None, 1 index", "'.pub' )) return 1 elif argv_parsed.create_config: get_local_config(create_if_empty=True) elif argv_parsed.declared_wdeployment_id is", "elif argv_parsed.command == 'stop-cameras': local_keys = list_local_keys() if len(local_keys) <", "argv_parsed.cprovider is not None: selected_cprovider = argv_parsed.cprovider.lower() 
if selected_cprovider not", "if rc != 0: return rc config['wdeployments'][index]['terminate'].remove(argv_parsed.rm_terminate_prog) modify_local(config) else: print('Use", "does it exist?') return 1 if argv_parsed.id_prefix is None: if", "help='add add-on vnc to enable VNC via rerobots.net', dest='add_addon_vnc') addon_vnc_parser.add_argument('--rm',", "or ' 'has the wrong permissions.'.format( argv_parsed.new_ssh_path, argv_parsed.new_ssh_path + '.pub'", "add-on mistyproxy', dest='rm_addon_mistyproxy') terminate_commanddesc = 'mark as unavailable; optionally wait", "height=height, crop=crop) except ConnectionError: if not argv_parsed.become_daemon: print('ERROR: failed to", "', end='') ui_input = input().lower() if ui_input in ('n', 'no',", "or config['ssh_key'] is None: print('WARNING: local configuration does not declare", "'Instances with connection type sshtun cannot launch.') pkglogger.removeHandler(loghandler) if argv_parsed.become_daemon:", "is None: print('cannot terminate without valid API client') return 1", "elif argv_parsed.help_target_command == 'addon-cmdsh': addon_cmdsh_parser.print_help() elif argv_parsed.help_target_command == 'addon-vnc': addon_vnc_parser.print_help()", "to only get local information') return 1 config = {", "if 'err' in res: if res['err'] == 'wrong authorization token':", "wdeployment in enumerate(ref): ref[jj]['url'] = 'https://rerobots.net/workspace/{}'.format(wdeployment['id']) if output_format == 'json':", "'stop-ad': terminate_parser.print_help() else: argparser.print_help() else: argparser.print_help() return 0 if argv_parsed.verbose:", "expire?') else: print(res['err']) return 1 else: print('summary of workspace deployment", "1 workspace deployment')) addon_mistyproxy_parser.add_argument('--add', action='store_true', default=False, help='add add-on mistyproxy to", "return rc config['wdeployments'][index]['terminate'].remove(argv_parsed.rm_terminate_prog) modify_local(config) else: print('Use `hardshare config` with a", 
"config['local']['wdeployments'] else: ref = config['wdeployments'] for jj, wdeployment in enumerate(ref):", "this user account') else: print('\\nregistered workspace deployments with this user", "print('cannot terminate without valid API client') return 1 try: ac.terminate(config['wdeployments'][index]['id'])", "for cprovider to use (advanced option)') config_parser.add_argument('--rm-raw-device', metavar='PATH', type=str, dest='remove_raw_device_path',", "'err' in res: if res['err'] == 'wrong authorization token': print('wrong", "print(res['err']) return 1 else: print('summary of workspace deployment {}'.format(res['id'])) print('\\tcreated:", "return 1 else: width, height = None, None if argv_parsed.attach_camera_crop_config:", "key pair (does NOT copy the key)') config_parser.add_argument('--add-raw-device', metavar='PATH', type=str,", "local configuration.' ' (try `hardshare config --create`)') return 1 config,", "= 'modify access rules (also known as capabilities or permissions)'", "config['remote']['err'] == 'wrong authorization token': print('wrong API token. Did it", "elif argv_parsed.rm_addon_cmdsh: rm_cmdsh(wdeployment_id, tok) else: print('Use `hardshare addon-cmdsh` with a", "index = find_wd(config, id_prefix) if index is None: print('ERROR: given", "terminate_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of target workspace deployment' '", "argv_parsed.targetaddr is None: print('--ip is required with --add') return 1", "'proxy': print('--rm-init-inside not supported for cprovider `proxy`') return 1 elif", "None: if len(config['wdeployments']) == 0: findings = [WorkspaceInstance.inspect_instance()] else: findings", "idp in id_prefix: index = find_wd(config, idp) if index is", "a switch. 
For example, `hardshare config -l`') print('or to get", "width, height = [int(x) for x in argv_parsed.attach_camera_res.split(',')] if width", "daemon' status_parser = subparsers.add_parser('status', description=status_commanddesc, help=status_commanddesc) status_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None,", "exit') argparser.add_argument('-V', '--version', action='store_true', default=False, help='print version of hardshare (this)", "['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside'].append(argv_parsed.add_init_inside) modify_local(config) elif", "anyone') register_commanddesc = 'register new workspace deployment' register_parser = subparsers.add_parser('register',", "None else: # selected_cprovider \\in {docker, podman} if config['wdeployments'][index]['image'] is", "return 1 with open(local_keys[0], 'rt') as fp: tok = fp.read().strip()", "subparsers.add_parser('help', help='print this help message and exit') help_parser.add_argument('help_target_command', metavar='COMMAND', type=str,", "' stop it without waiting'), dest='force_terminate') help_message_purge = ('if the", "be executed inside container') config_parser.add_argument('--rm-init-inside', action='store_true', default=False, dest='rm_init_inside', help='remove (empty)", "dest='new_api_token', help='add new account key') config_parser.add_argument('--add-ssh-path', metavar='PATH', dest='new_ssh_path', help='add path", "not None remote_config = ac.get_remote_config(include_dissolved=argv_parsed.include_dissolved) except HSError as err: print('Error:", "help='print version number and exit.') help_parser = subparsers.add_parser('help', help='print this", "default=None, help='add command to be executed inside container') config_parser.add_argument('--rm-init-inside', action='store_true',", "if argv_parsed.verbose: pkglogger.setLevel(logging.DEBUG) if argv_parsed.output_format is not None: output_format =", 
"not in ['yaml', 'json']: print('output format unrecognized: {}'.format(argv_parsed.output_format)) return 1", "as err: print('{}'.format(err)) return 1 elif argv_parsed.add_rule_permit_all: ui_input = None", "print('To get a help message, enter\\n\\n hardshare help addon-vnc') return", "'help_target_command') and argv_parsed.help_target_command is not None: if argv_parsed.help_target_command == 'config':", "instances' advertise_parser = subparsers.add_parser('ad', description=advertise_commanddesc, help=advertise_commanddesc) advertise_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None,", "elif argv_parsed.add_rule_permit_all: ui_input = None while ui_input not in ('y',", "= None else: # selected_cprovider \\in {docker, podman} if config['wdeployments'][index]['image']", "default=None, help=('id of workspace deployment for configuration changes' ' (can", "connection type sshtun cannot launch.') pkglogger.removeHandler(loghandler) if argv_parsed.become_daemon: if os.fork()", "dest='attach_camera_crop_config', default=None, help=('image crop configuration; ' 'default: all wdeployments get", "print('ERROR: {} or {} does not exist or ' 'has", "initialization') config_parser.add_argument('-p', '--prune', action='store_true', default=False, dest='prune_err_keys', help=('delete files in local", "metavar='FILE', dest='new_api_token', help='add new account key') config_parser.add_argument('--add-ssh-path', metavar='PATH', dest='new_ssh_path', help='add", "logging.handlers.WatchedFileHandler(filename='hardshare_client.log', mode='a', delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s) (pid: {});' ' %(asctime)s", "Remove from local configuration, if present rm_wd(get_local_config(), wdid, save=True) elif", "in ('n', 'no', ''): return 1 try: res = ac.dissolve_registration(wdid)", "wdeployment_id = config['wdeployments'][index]['id'] local_keys = list_local_keys() if len(local_keys) 
< 1:", "metavar='ADDRESS', default=None, help='IP address of the Misty robot', dest='targetaddr') addon_mistyproxy_parser.add_argument('--rm',", "with --add') return 1 add_mistyproxy(wdeployment_id, tok, argv_parsed.targetaddr) elif argv_parsed.rm_addon_mistyproxy: rm_mistyproxy(wdeployment_id,", "deployments') config_parser.add_argument('--declare', metavar='ID', dest='declared_wdeployment_id', default=None, help=('declare that workspace deployment is'", "'this argument is not required ' 'if there is only", "1 or height < 1: print('Width, height must be positive')", "config['wdeployments'][index]['image'] = argv_parsed.cprovider_img modify_local(config) elif argv_parsed.add_terminate_prog is not None: config,", "to stop container `{}`'.format(findings['container']['name'])) return 1 return 0 else: print('failed", "unique prefix); ' 'this argument is not required ' 'if", "action='store_false', default=True, dest='register_at_most_one', help=('permit registration of more than 1 wdeployment;", "modify_local, rm_wd from .api import HSAPIClient from .err import Error", "ac.terminate(config['wdeployments'][index]['id']) except FileNotFoundError: print('ERROR: cannot reach daemon. 
Does it exist?", "prefix); ' 'this argument is not required ' 'if there", "print('or to get a help message, enter\\n\\n hardshare help rules')", "output formatting (default is no special formatting); ' 'options: YAML", "subparsers.add_parser('addon-vnc', description=addon_vnc_commanddesc, help=addon_vnc_commanddesc) addon_vnc_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace", "default=False, help='add add-on cmdsh to enable terminal access via WebSockets',", "output_format == 'yaml' print(yaml.dump(res, default_flow_style=False)) elif argv_parsed.drop_all_rules or argv_parsed.add_rule_permit_me: try:", "rc cprovider = config['wdeployments'][index]['cprovider'] if cprovider not in ['docker', 'podman',", "metavar='ID', nargs='*', default=None, help=('id of workspace deployment on which to", "supported for cprovider `proxy`') return 1 config['wdeployments'][index]['image'] = argv_parsed.cprovider_img modify_local(config)", "token. Did it expire?') else: print(res['err']) return 1 res['comments'] =", "'podman']: print('\\timg: {}'.format(wdeployment['image'])) if wdeployment['terminate']: print('\\tterminate:') for terminate_p in wdeployment['terminate']:", "addon-cmdsh` with a switch.') print('To get a help message, enter\\n\\n", "None, None, 1 if len(config['wdeployments']) == 0: print(('ERROR: no workspace", "argv_parsed.rm_terminate_prog is not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if", "config_parser = subparsers.add_parser('config', description=config_commanddesc, help=config_commanddesc) config_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id", "return rc wdeployment_id = config['wdeployments'][index]['id'] local_keys = list_local_keys() if len(local_keys)", "return 1 if argv_parsed.id_prefix is None: wdid = None else:", "None: print('--ip is required with --add') return 1 add_mistyproxy(wdeployment_id, tok,", "show local configuration data') 
config_parser.add_argument('--include-dissolved', action='store_true', default=False, dest='include_dissolved', help='include configuration", "None: print('no local configuration found. (try `hardshare config -h`)') return", "daemon. Does it exist? (Try `hardshare status`)') return 1 return", "'user account, whether or not started on this host'), dest='all_cameras')", "crop = None if argv_parsed.become_daemon: if os.fork() != 0: return", "help_parser = subparsers.add_parser('help', help='print this help message and exit') help_parser.add_argument('help_target_command',", "you connected to the Internet?') return 1 elif argv_parsed.command ==", "except: print('failed to stop container `{}`'.format(findings['container']['name'])) return 1 return 0", "indices elif id_prefix: index = find_wd(config, id_prefix) if index is", "it exist?') return None, None, 1 if len(config['wdeployments']) == 0:", "for your workspace deployments' addon_cmdsh_parser = subparsers.add_parser('addon-cmdsh', description=addon_cmdsh_commanddesc, help=addon_cmdsh_commanddesc) addon_cmdsh_parser.add_argument('id_prefix',", "only 1 workspace deployment')) config_parser.add_argument('-c', '--create', action='store_true', default=False, dest='create_config', help='if", "'no', ''): return 1 try: ac.add_access_rule(wdid, to_user='*') except Exception as", "help=attach_camera_commanddesc) attach_camera_parser.add_argument('camera', default=0, type=int, help=('on Linux, 0 typically implies /dev/video0;", "= [int(x) for x in argv_parsed.attach_camera_res.split(',')] if width < 1", "return 1 config['wdeployments'][index]['init_inside'] = [] modify_local(config) elif argv_parsed.cprovider is not", "selected_cprovider if selected_cprovider == 'proxy': config['wdeployments'][index]['image'] = None else: #", "cprovider == 'proxy': print('--add-init-inside not supported for cprovider `proxy`') return", "1 elif argv_parsed.command == 'addon-vnc': if ac is None: print('cannot", "print('{}'.format(err)) return 1 
if 'err' in res: if res['err'] ==", "== 'addon-cmdsh': addon_cmdsh_parser.print_help() elif argv_parsed.help_target_command == 'addon-vnc': addon_vnc_parser.print_help() elif argv_parsed.help_target_command", "= str(uuid.UUID(argv_parsed.wdid)) except: print('The given ID does not appear to", "copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # #", "in config: ref = config['local']['wdeployments'] else: ref = config['wdeployments'] for", "return rc if 'ssh_key' not in config or config['ssh_key'] is", "exist or ' 'has the wrong permissions.'.format( argv_parsed.new_ssh_path, argv_parsed.new_ssh_path +", "in writing, software # distributed under the License is distributed", "print('unknown cprovider: {}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside'] = [] modify_local(config) elif", "1 config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0:", "os.fork() != 0: return 0 os.close(0) os.close(1) os.close(2) else: pkglogger.addHandler(logging.StreamHandler())", "subparsers.add_parser('dissolve', description=dissolve_commanddesc, help=dissolve_commanddesc) dissolve_parser.add_argument('wdid', metavar='ID', nargs='?', default=None, help='id of workspace", "terminated and attempt local clean-up; this' ' command is a", "while contacting remote server ' 'at {}'.format(ac.base_uri)) return 1 if", "from invoking terminal (i.e., run as daemon)', dest='become_daemon') stop_cameras_commanddesc =", "allow HTTP proxy to Misty robots', dest='add_addon_mistyproxy') addon_mistyproxy_parser.add_argument('--ip', metavar='ADDRESS', default=None,", "help=('special output formatting (default is no special formatting); ' 'options:", "'podman', 'proxy']: print('unknown cprovider: {}'.format(cprovider)) return 1 if cprovider ==", "if 'err_keys' in config['local'] and len(config['local']['err_keys']) > 0: print('found possible", "is None: print('WARNING: local configuration does not declare SSH key.\\n'", "if width < 1 or height 
< 1: print('Width, height", "logging.getLogger('hardshare') pkglogger.setLevel(logging.WARNING) loghandler = logging.handlers.WatchedFileHandler(filename='hardshare_client.log', mode='a', delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s)", "subparsers.add_parser('version', help='print version number and exit.') help_parser = subparsers.add_parser('help', help='print", "of target workspace deployment' ' (can be unique prefix)')) terminate_parser.add_argument('-f',", "delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s) (pid: {});' ' %(asctime)s ; %(message)s'", "information') return 1 config = { 'local': config, 'remote': remote_config,", "config, 'remote': remote_config, } if 'local' in config: ref =", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "help='add path to SSH key pair (does NOT copy the", "'has wdeployment declared')) check_commanddesc = 'check registration of this workspace", "a' ' non-recoverable state, then mark it remotely as' '", "argv_parsed.attach_camera_res: width, height = [int(x) for x in argv_parsed.attach_camera_res.split(',')] if", "HSError as err: print('ERROR: {}'.format(err)) return 1 except ConnectionError: print('ERROR:", "help rules') return 1 elif argv_parsed.command == 'check': if ac", "= logging.getLogger('hardshare') pkglogger.setLevel(logging.WARNING) loghandler = logging.handlers.WatchedFileHandler(filename='hardshare_client.log', mode='a', delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s", "= subparsers.add_parser('addon-cmdsh', description=addon_cmdsh_commanddesc, help=addon_cmdsh_commanddesc) addon_cmdsh_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of", "default=None, help=('id of workspace deployment to 
check' ' (can be", "assert ac is not None remote_config = ac.get_remote_config(include_dissolved=argv_parsed.include_dissolved) except HSError", "as hardshare_pkg_version print(hardshare_pkg_version) return 0 elif argv_parsed.command is None or", "in res: if res['err'] == 'not found': print('not found: workspace", "'attach camera stream to workspace deployments' attach_camera_parser = subparsers.add_parser('attach-camera', description=attach_camera_commanddesc,", "for cprovider `proxy`') return 1 elif cprovider not in ['docker',", "-l --local to only get local information') return 1 config", "server.') if config['remote']['err'] == 'wrong authorization token': print('wrong API token.", "return 1 else: if ac is None: print('cannot terminate without", "Error as HSError from .addons import camera_main, stop_cameras from .addons", "configuration does not declare SSH key.\\n' 'Instances with connection type", "if local configuration already ' 'has wdeployment declared')) check_commanddesc =", "'podman': cp_images = subprocess.run([cprovider, 'image', 'exists', argv_parsed.cprovider_img]) if cp_images.returncode !=", "argv_parsed = argparser.parse_args(argv) if argv_parsed.print_version or argv_parsed.command == 'version': from", "in ['docker', 'podman']: print('\\timg: {}'.format(wdeployment['image'])) if wdeployment['terminate']: print('\\tterminate:') for terminate_p", "elif argv_parsed.cprovider_img is not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix)", "tok) else: print('Use `hardshare addon-mistyproxy` with a switch.') print('To get", "print(('Do you want to dissolve {}? 
This action cannot be", "wait for current instance to finish' terminate_parser = subparsers.add_parser('stop-ad', description=terminate_commanddesc,", "os.unlink(err_key_path) elif argv_parsed.new_api_token: try: add_key(argv_parsed.new_api_token) except: print('failed to add key')", "images')) attach_camera_parser.add_argument('-d', '--daemon', action='store_true', default=False, help='detach from invoking terminal (i.e.,", "'has the wrong permissions.'.format( argv_parsed.new_ssh_path, argv_parsed.new_ssh_path + '.pub' )) return", "in res: if res['err'] == 'wrong authorization token': print('wrong API", "argv_parsed.add_addon_mistyproxy: if argv_parsed.targetaddr is None: print('--ip is required with --add')", "the License for the specific language governing permissions and #", "1 elif argv_parsed.command == 'rules': if ac is None: print('no", "the wrong permissions.'.format( argv_parsed.new_ssh_path, argv_parsed.new_ssh_path + '.pub' )) return 1", "else: # output_format == 'yaml' print(yaml.dump(res, default_flow_style=False)) elif argv_parsed.drop_all_rules or", "argv_parsed.help_target_command == 'status': status_parser.print_help() elif argv_parsed.help_target_command == 'attach-camera': attach_camera_parser.print_help() elif", "help=addon_cmdsh_commanddesc) addon_cmdsh_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment' '", "default=False, help='detach from invoking terminal (i.e., run as daemon)', dest='become_daemon')", "except: print('Error occurred while contacting remote server ' 'at {}'.format(ac.base_uri))", "help=config_commanddesc) config_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment for", "cannot be undone. 
' '[y/N] ').format(wdid), end='') ui_input = input().lower()", "== 'yaml': print(yaml.dump(config, default_flow_style=False)) else: if 'local' not in config:", "else: # cprovider == 'proxy' print('ERROR: --assign-image not supported for", "except: print('The given ID does not appear to be valid.')", "== 'register': if ac is None: print('cannot register without initial", "rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc config['wdeployments'][index]['terminate'].remove(argv_parsed.rm_terminate_prog)", "errored_keys = list_local_keys(collect_errors=True) for err_key_path, err in errored_keys.items(): print('deleting {}...'.format(err_key_path))", "cprovider not in ['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1", "of workspace deployment for configuration changes' ' (can be unique", "argv_parsed.output_format.lower() if output_format not in ['yaml', 'json']: print('output format unrecognized:", "to be valid.') return 1 ui_input = None while ui_input", "None if argv_parsed.become_daemon: if os.fork() != 0: return 0 os.close(0)", "config, 'remote': None, } print('workspace deployments defined in local configuration:')", "'note that access is denied by default, ' 'including to", "res['err'] == 'not found': print('not found: workspace deployment with id", "'addon-vnc': addon_vnc_parser.print_help() elif argv_parsed.help_target_command == 'addon-mistyproxy': addon_mistyproxy_parser.print_help() elif argv_parsed.help_target_command ==", "dest='targetaddr') addon_mistyproxy_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on mistyproxy', dest='rm_addon_mistyproxy') terminate_commanddesc =", "type=str, dest='raw_device_path', default=None, help='add device file to present in container')", "return 1 elif argv_parsed.command == 'addon-cmdsh': if ac is None:", "addon_mistyproxy_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on mistyproxy', 
dest='rm_addon_mistyproxy') terminate_commanddesc = 'mark", "account:') for wd in config['remote']['deployments']: print('{}'.format(wd['id'])) print('\\tcreated: {}'.format(wd['date_created'])) if wd['desc']", "output_format = None try: ac = HSAPIClient() except: ac =", "rm_mistyproxy def get_config_with_index(id_prefix=None): try: config = get_local_config() except: print('error loading", "0: print(('ERROR: no workspace deployment in local configuration.')) return config,", "new instances' advertise_parser = subparsers.add_parser('ad', description=advertise_commanddesc, help=advertise_commanddesc) advertise_parser.add_argument('id_prefix', metavar='ID', nargs='?',", "return 1 except: print('Error occurred while contacting rerobots servers') print('Try", "deployments with this user account:') for wd in config['remote']['deployments']: print('{}'.format(wd['id']))", "' does it exist? is it broken?') return 1 if", "[] for wd in config['wdeployments']: findings.append(WorkspaceInstance.inspect_instance(wdeployment=wd)) else: findings = []", "print('\\tdesc: {}'.format(wd['desc'])) print('\\torigin (address) of registration: {}' .format(wd['origin'])) if wd['dissolved']:", "try: add_key(argv_parsed.new_api_token) except: print('failed to add key') return 1 elif", "loading configuration data. 
does it exist?') return None, None, 1", "0 os.close(0) os.close(1) os.close(2) try: camera_main(wdeployments, tok=tok, dev=argv_parsed.camera, width=width, height=height,", "`{}`'.format(findings['container']['name'])) return 1 return 0 else: print('failed to detect local", "rules_parser.add_argument('--permit-all', action='store_true', default=False, dest='add_rule_permit_all', help='permit instantiations by anyone') register_commanddesc =", "else: print('failed to detect local instance') return 1 else: if", "; %(message)s' .format(os.getpid()))) pkglogger.addHandler(loghandler) return ac.run_sync(config['wdeployments'][index]['id']) elif argv_parsed.command == 'stop-ad':", "camera streams previously started by attach-camera' stop_cameras_parser = subparsers.add_parser('stop-cameras', description=stop_cameras_commanddesc,", "print('\\timg: {}'.format(wdeployment['image'])) if wdeployment['terminate']: print('\\tterminate:') for terminate_p in wdeployment['terminate']: print('\\t\\t{}'.format(terminate_p))", "{}'.format(res['date_created'])) print('\\torigin (address) of registration: {}'.format(res['origin'])) if 'date_dissolved' in res:", "# distributed under the License is distributed on an \"AS", "has been previously registered under' ' the same user account.)'))", "config` with a switch. For example, `hardshare config -l`') print('or", "configuration.' ' (try `hardshare config --create`)') return 1 try: print(ac.register_new(at_most_one=argv_parsed.register_at_most_one))", "# Unless required by applicable law or agreed to in", "broken?') return 1 if not argv_parsed.only_local_config: # Try to get", "= [] for wd in config['wdeployments']: findings.append(WorkspaceInstance.inspect_instance(wdeployment=wd)) else: findings =", "local information') return 1 config = { 'local': config, 'remote':", "except: print('error loading configuration data. 
does it exist?') return None,", "find_wd, modify_local, rm_wd from .api import HSAPIClient from .err import", "1 workspace deployment')) config_parser.add_argument('-c', '--create', action='store_true', default=False, dest='create_config', help='if no", "'at {}'.format(ac.base_uri)) return 1 if 'err' in res: if res['err']", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "'--device={D}:{D}'.format(D=argv_parsed.remove_raw_device_path) config['wdeployments'][index]['cargs'].remove(carg) modify_local(config) elif argv_parsed.add_init_inside is not None: config, index,", "= config['wdeployments'][index]['id'] if argv_parsed.list_rules: try: res = ac.get_access_rules(wdid) except Exception", "addon_mistyproxy_parser.print_help() elif argv_parsed.help_target_command == 'ad': advertise_parser.print_help() elif argv_parsed.help_target_command == 'stop-ad':", "default=None, help=('id of workspace deployment to advertise' ' (can be", "of hardshare (this) package.', dest='print_version') argparser.add_argument('-v', '--verbose', action='store_true', default=False, help='print", "rerobots.net', dest='add_addon_vnc') addon_vnc_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on vnc', dest='rm_addon_vnc') addon_mistyproxy_commanddesc", "drivers')) attach_camera_parser.add_argument('--crop', metavar='CROPCONFIG', type=str, dest='attach_camera_crop_config', default=None, help=('image crop configuration; '", "dest='rm_addon_cmdsh') addon_vnc_commanddesc = 'manage add-on vnc for your workspace deployments'", "config_parser.add_argument('-c', '--create', action='store_true', default=False, dest='create_config', help='if no local configuration is", "default=False, dest='prune_err_keys', help=('delete files in local key directory that' '", "advertise_commanddesc = 'advertise availability, accept new instances' advertise_parser = subparsers.add_parser('ad',", "err: print('ERROR: {}'.format(err)) return 1 elif argv_parsed.command == 
'addon-mistyproxy': if", "(i.e., run as daemon)', dest='become_daemon') attach_camera_commanddesc = 'attach camera stream", "which to attach' ' (can be unique prefix); ' 'this", "prefix)')) dissolve_commanddesc = ('dissolve this workspace deployment, making it' '", "enumerate(ref): ref[jj]['url'] = 'https://rerobots.net/workspace/{}'.format(wdeployment['id']) if output_format == 'json': print(json.dumps(config)) elif", "prefix)')) terminate_parser.add_argument('-f', '--force', action='store_true', default=False, help=('if there is an active", "you (the owner)') rules_parser.add_argument('--drop-all', action='store_true', default=False, dest='drop_all_rules', help=('remove all access", "unavailable; optionally wait for current instance to finish' terminate_parser =", "None: print('\\tdesc: {}'.format(wd['desc'])) print('\\torigin (address) of registration: {}' .format(wd['origin'])) if", "{}'.format(wdeployment['image'])) if wdeployment['terminate']: print('\\tterminate:') for terminate_p in wdeployment['terminate']: print('\\t\\t{}'.format(terminate_p)) print('\\nfound", "the Apache License, Version 2.0 (the \"License\"); # you may", "try 0')) attach_camera_parser.add_argument('id_prefix', metavar='ID', nargs='*', default=None, help=('id of workspace deployment", "dest='raw_device_path', default=None, help='add device file to present in container') config_parser.add_argument('--cprovider',", "config_parser.add_argument('--rm-raw-device', metavar='PATH', type=str, dest='remove_raw_device_path', default=None, help='remove device previously marked for", "subparsers.add_parser('attach-camera', description=attach_camera_commanddesc, help=attach_camera_commanddesc) attach_camera_parser.add_argument('camera', default=0, type=int, help=('on Linux, 0 typically", "fp.read().strip() try: if argv_parsed.add_addon_cmdsh: add_cmdsh(wdeployment_id, tok) elif argv_parsed.rm_addon_cmdsh: rm_cmdsh(wdeployment_id, tok)", "output_format == 'yaml' print(yaml.dump(findings, 
default_flow_style=False)) elif argv_parsed.command == 'attach-camera': config,", "0 typically implies /dev/video0; ' 'if you only have one", "else: argparser.print_help() return 0 if argv_parsed.verbose: pkglogger.setLevel(logging.DEBUG) if argv_parsed.output_format is", "= [config['wdeployments'][jj]['id'] for jj in indices] local_keys = list_local_keys() if", "import yaml from aiohttp.client_exceptions import ClientConnectorError as ConnectionError from .core", "if hasattr(argv_parsed, 'help_target_command') and argv_parsed.help_target_command is not None: if argv_parsed.help_target_command", "cprovider: {}'.format(cprovider)) return 1 if cprovider == 'podman': cp_images =", "except HSError as err: print('Error: {}'.format(err)) return 1 except: print('Error", "print(config['remote']['err']) return 1 if len(config['remote']['deployments']) == 0: print('\\nno registered workspace", "print('{}'.format(wd['id'])) print('\\tcreated: {}'.format(wd['date_created'])) if wd['desc'] is not None: print('\\tdesc: {}'.format(wd['desc']))", "are not valid; to get list of' ' files with", "be unique prefix)')) advertise_commanddesc = 'advertise availability, accept new instances'", "exist') return 1 config['wdeployments'][index]['terminate'].append(normalized_path) modify_local(config) elif argv_parsed.rm_terminate_prog is not None:", "file to present in container') config_parser.add_argument('--cprovider', metavar='CPROVIDER', type=str, dest='cprovider', default=None,", "help message, enter\\n\\n hardshare help addon-mistyproxy') return 1 except ValueError", "in enumerate(ref): ref[jj]['url'] = 'https://rerobots.net/workspace/{}'.format(wdeployment['id']) if output_format == 'json': print(json.dumps(config))", "exist? 
is it broken?') return 1 if not argv_parsed.only_local_config: #", "device file does not exist') return 1 carg = '--device={D}:{D}'.format(D=argv_parsed.raw_device_path)", "1 config['wdeployments'][index]['init_inside'] = [] modify_local(config) elif argv_parsed.cprovider is not None:", "[] for idp in id_prefix: index = find_wd(config, idp) if", "remote server ' 'at {}'.format(ac.base_uri)) return 1 if 'err' in", "[0] else: indices = [] for idp in id_prefix: index", "elif argv_parsed.help_target_command == 'dissolve': dissolve_parser.print_help() elif argv_parsed.help_target_command == 'status': status_parser.print_help()", "configuration is found, then create one') config_parser.add_argument('--add-terminate-prog', metavar='PATH', dest='add_terminate_prog', default=None,", "not one or it is otherwise in a' ' non-recoverable", "res = ac.get_access_rules(wdid) except Exception as err: print('{}'.format(err)) return 1", "== 'status': try: config = get_local_config() except: print('error loading configuration", "argv_parsed.rm_addon_vnc: rm_vnc(wdeployment_id, tok) else: print('Use `hardshare addon-vnc` with a switch.')", "print('workspace deployments defined in local configuration:') if len(config['local']['wdeployments']) == 0:", "dest='add_rule_permit_me', help='permit instantiations by you (the owner)') rules_parser.add_argument('--drop-all', action='store_true', default=False,", "normalized_path = os.path.abspath(argv_parsed.add_terminate_prog) if not os.path.exists(normalized_path): print('ERROR: given path does", "default=None, help=('id of target workspace deployment' ' (can be unique", "fp.read().strip() try: if argv_parsed.add_addon_vnc: add_vnc(wdeployment_id, tok) elif argv_parsed.rm_addon_vnc: rm_vnc(wdeployment_id, tok)", "= 'stop camera streams previously started by attach-camera' stop_cameras_parser =", "does not match precisely 1 workspace deployment') return config, None,", "to enable terminal access via WebSockets', dest='add_addon_cmdsh') 
addon_cmdsh_parser.add_argument('--rm', action='store_true', default=False,", "keys with errors:') for err_key_path, err in config['local']['err_keys'].items(): print('\\t {}:", "%(message)s' .format(os.getpid()))) pkglogger.addHandler(loghandler) return ac.run_sync(config['wdeployments'][index]['id']) elif argv_parsed.command == 'stop-ad': config,", "elif argv_parsed.help_target_command == 'status': status_parser.print_help() elif argv_parsed.help_target_command == 'attach-camera': attach_camera_parser.print_help()", "as err: print('ERROR: {}'.format(err)) return 1 except ConnectionError: print('ERROR: failed", "in ('y', 'yes'): print('Do you want to permit access by", "else: print('\\t' + '\\n\\t'.join(config['local']['keys'])) if 'err_keys' in config['local'] and len(config['local']['err_keys'])", "else: for wdeployment in config['local']['wdeployments']: print('{}\\n\\turl: {}\\n\\towner: {}\\n\\tcprovider: {}\\n\\tcargs: {}'.format(", "config_parser.print_help() elif argv_parsed.help_target_command == 'rules': rules_parser.print_help() elif argv_parsed.help_target_command == 'register':", "with a switch.') print('To get a help message, enter\\n\\n hardshare", "ConnectionError: if not argv_parsed.become_daemon: print('ERROR: failed to reach server. 
Are", "type=str, dest='cprovider_img', default=None, help='assign image for cprovider to use (advanced", "print('ERROR: cprovider must be one of the following: docker, podman,", "'stop-cameras': local_keys = list_local_keys() if len(local_keys) < 1: print('No valid", "return 1 elif argv_parsed.command == 'addon-mistyproxy': if ac is None:", "+ '.pub' )) return 1 elif argv_parsed.create_config: get_local_config(create_if_empty=True) elif argv_parsed.declared_wdeployment_id", "not in config or config['ssh_key'] is None: print('WARNING: local configuration", "argv_parsed.only_local_config: # Try to get remote config, given possibly new", "'proxy': print('--add-init-inside not supported for cprovider `proxy`') return 1 elif", "rm_cmdsh(wdeployment_id, tok) else: print('Use `hardshare addon-cmdsh` with a switch.') print('To", "except FileNotFoundError: print('ERROR: cannot reach daemon. Does it exist? (Try", "under the License is distributed on an \"AS IS\" BASIS,", "then try 0')) attach_camera_parser.add_argument('id_prefix', metavar='ID', nargs='*', default=None, help=('id of workspace", "appear to be valid.') return 1 wdid = config['wdeployments'][index]['id'] if", "import HSAPIClient from .err import Error as HSError from .addons", "help=rules_commanddesc) rules_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of target workspace deployment'", "'-f', findings['container']['name']], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) except: print('failed to stop container `{}`'.format(findings['container']['name']))", "print('error loading configuration data.' ' does it exist? 
is it", "terminal (i.e., run as daemon)', dest='become_daemon') attach_camera_commanddesc = 'attach camera", "program from list of commands to execute; ' 'for example,", "'ssh_key' not in config or config['ssh_key'] is None: print('WARNING: local", "ac.get_remote_config(include_dissolved=argv_parsed.include_dissolved) except HSError as err: print('Error: {}'.format(err)) return 1 except:", "there is not one or it is otherwise in a'", "-l`') return 1 with open(local_keys[0], 'rt') as fp: tok =", "= get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc wdeployments =", "{}'.format(argv_parsed.output_format)) return 1 else: output_format = None try: ac =", "= None if argv_parsed.become_daemon: if os.fork() != 0: return 0", "your workspace deployments' addon_vnc_parser = subparsers.add_parser('addon-vnc', description=addon_vnc_commanddesc, help=addon_vnc_commanddesc) addon_vnc_parser.add_argument('id_prefix', metavar='ID',", "nargs='?') config_commanddesc = 'manage local and remote configuration' config_parser =", "(this) package.', dest='print_version') argparser.add_argument('-v', '--verbose', action='store_true', default=False, help='print verbose messages", "help_parser.add_argument('help_target_command', metavar='COMMAND', type=str, nargs='?') config_commanddesc = 'manage local and remote", "you want to dissolve {}? 
This action cannot be undone.", "findings.append(WorkspaceInstance.inspect_instance(wdeployment=wd)) else: findings = [] for m in find_wd(config, argv_parsed.id_prefix,", "errors, try `--list`')) config_parser.add_argument('-l', '--list', action='store_true', default=False, dest='list_config', help='list configuration')", "then' ' stop it without waiting'), dest='force_terminate') help_message_purge = ('if", "occurred while contacting remote server.') if config['remote']['err'] == 'wrong authorization", "deployment')) advertise_parser.add_argument('-d', '--daemon', action='store_true', default=False, help='detach from invoking terminal (i.e.,", "return 1 config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc !=", "ac is None: print('cannot terminate without valid API client') return", "[WorkspaceInstance.inspect_instance()] else: findings = [] for wd in config['wdeployments']: findings.append(WorkspaceInstance.inspect_instance(wdeployment=wd))", "remote_config = ac.get_remote_config(include_dissolved=argv_parsed.include_dissolved) except HSError as err: print('Error: {}'.format(err)) return", "and attempt local clean-up; this' ' command is a last", "License. \"\"\"Command-line interface \"\"\" import argparse import json import logging", "= argv_parsed.output_format.lower() if output_format not in ['yaml', 'json']: print('output format", "switch.') print('To get a help message, enter\\n\\n hardshare help addon-cmdsh')", "argv_parsed.command == 'dissolve': if ac is None: print('no local configuration", "argparser.add_subparsers(dest='command') subparsers.add_parser('version', help='print version number and exit.') help_parser = subparsers.add_parser('help',", "if os.fork() != 0: return 0 os.close(0) os.close(1) os.close(2) try:", "anyone? 
[y/N] ', end='') ui_input = input().lower() if ui_input in", "if argv_parsed.id_prefix is None: if len(config['wdeployments']) == 0: findings =", "help=addon_mistyproxy_commanddesc) addon_mistyproxy_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment' '", "dest='remove_raw_device_path', default=None, help='remove device previously marked for inclusion in container')", "valid; to get list of' ' files with errors, try", "= WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][index]) if 'container' in findings: try: subprocess.check_call([cprovider, 'rm', '-f',", "= find_wd(config, id_prefix) if index is None: print('ERROR: given prefix", "= fp.read().strip() try: stop_cameras(tok, allcam=argv_parsed.all_cameras) except ConnectionError: print('ERROR: failed to", "1 config = { 'local': config, 'remote': remote_config, } if", "given path does not exist') return 1 config['wdeployments'][index]['terminate'].append(normalized_path) modify_local(config) elif", "workspace deployment')) advertise_parser.add_argument('-d', '--daemon', action='store_true', default=False, help='detach from invoking terminal", "print('\\t {}: {}'.format(err, err_key_path)) if config['remote']: if 'err' in config['remote']:", "description=check_commanddesc, help=check_commanddesc) check_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment", "for err_key_path, err in errored_keys.items(): print('deleting {}...'.format(err_key_path)) os.unlink(err_key_path) elif argv_parsed.new_api_token:", "if not os.path.exists(argv_parsed.raw_device_path): print('ERROR: given device file does not exist')", "return 1 carg = '--device={D}:{D}'.format(D=argv_parsed.raw_device_path) config['wdeployments'][index]['cargs'].append(carg) modify_local(config) elif argv_parsed.remove_raw_device_path is", "= json.loads(argv_parsed.attach_camera_crop_config) else: crop = None if 
argv_parsed.become_daemon: if os.fork()", "stop_cameras from .addons import add_cmdsh, rm_cmdsh, add_vnc, rm_vnc, add_mistyproxy, rm_mistyproxy", "index, 0 def main(argv=None): pkglogger = logging.getLogger('hardshare') pkglogger.setLevel(logging.WARNING) loghandler =", "deployment defined.') return config, None, 1 index = 0 return", "found: workspace deployment with id prefix {}' .format(res['id_prefix'])) elif res['err']", "== 'ad': advertise_parser.print_help() elif argv_parsed.help_target_command == 'stop-ad': terminate_parser.print_help() else: argparser.print_help()", "findings['container']['name']], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) except: print('failed to stop container `{}`'.format(findings['container']['name'])) return", "image name is not recognized by cprovider') return 1 elif", "in config['remote']: print('Error occurred while contacting remote server.') if config['remote']['err']", "modify_local(config) elif argv_parsed.remove_raw_device_path is not None: config, index, rc =", "1 index = 0 return config, index, 0 def main(argv=None):", "ANY KIND, either express or implied. # See the License", "command: more than 1 workspace deployment defined.') return config, None,", "with open(local_keys[0], 'rt') as fp: tok = fp.read().strip() try: stop_cameras(tok,", "elif argv_parsed.command == 'check': if ac is None: print('no local", "this workspace deployment' check_parser = subparsers.add_parser('check', description=check_commanddesc, help=check_commanddesc) check_parser.add_argument('id_prefix', metavar='ID',", "the License. # You may obtain a copy of the", "initial local configuration.' 
' (try `hardshare config --create`)') return 1", "1 elif argv_parsed.command == 'check': if ac is None: print('no", "default=None, type=str, help=('special output formatting (default is no special formatting);", "data') config_parser.add_argument('--include-dissolved', action='store_true', default=False, dest='include_dissolved', help='include configuration data of dissolved", "tok = fp.read().strip() try: if argv_parsed.add_addon_cmdsh: add_cmdsh(wdeployment_id, tok) elif argv_parsed.rm_addon_cmdsh:", "add-on vnc for your workspace deployments' addon_vnc_parser = subparsers.add_parser('addon-vnc', description=addon_vnc_commanddesc,", "help message and exit') argparser.add_argument('-V', '--version', action='store_true', default=False, help='print version", "err: print('Error: {}'.format(err)) return 1 except: print('Error occurred while contacting", "# See the License for the specific language governing permissions", "if cprovider == 'podman': cp_images = subprocess.run([cprovider, 'image', 'exists', argv_parsed.cprovider_img])", "it expire?') else: print(res['err']) return 1 else: print('summary of workspace", "tok = fp.read().strip() try: stop_cameras(tok, allcam=argv_parsed.all_cameras) except ConnectionError: print('ERROR: failed", "print(json.dumps(res)) else: # output_format == 'yaml' print(yaml.dump(res, default_flow_style=False)) elif argv_parsed.drop_all_rules", "assert ac is not None ac.declare_existing(argv_parsed.declared_wdeployment_id) ac.sync_config() elif argv_parsed.raw_device_path is", "elif argv_parsed.add_terminate_prog is not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix)", "'stop camera streams previously started by attach-camera' stop_cameras_parser = subparsers.add_parser('stop-cameras',", "argv_parsed.new_ssh_path, argv_parsed.new_ssh_path + '.pub' )) return 1 elif argv_parsed.create_config: get_local_config(create_if_empty=True)", "from invoking terminal (i.e., run as daemon)', dest='become_daemon') 
attach_camera_commanddesc =", "0 else: print('failed to detect local instance') return 1 else:", "For example, `hardshare config -l`') print('or to get a help", "tok) elif argv_parsed.rm_addon_cmdsh: rm_cmdsh(wdeployment_id, tok) else: print('Use `hardshare addon-cmdsh` with", "argv_parsed.command == 'attach-camera': config, indices, rc = get_config_with_index(argv_parsed.id_prefix) if rc", "if res['err'] == 'wrong authorization token': print('wrong API token. Did", "in ('n', 'no', ''): return 1 try: ac.add_access_rule(wdid, to_user='*') except", "return 1 if not os.path.exists(argv_parsed.raw_device_path): print('ERROR: given device file does", "only have one camera, then try 0')) attach_camera_parser.add_argument('id_prefix', metavar='ID', nargs='*',", "\"\"\"Command-line interface \"\"\" import argparse import json import logging import", "rc != 0: return rc config['wdeployments'][index]['terminate'].remove(argv_parsed.rm_terminate_prog) modify_local(config) else: print('Use `hardshare", "description=addon_cmdsh_commanddesc, help=addon_cmdsh_commanddesc) addon_cmdsh_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment'", "!= 0: return rc carg = '--device={D}:{D}'.format(D=argv_parsed.remove_raw_device_path) config['wdeployments'][index]['cargs'].remove(carg) modify_local(config) elif", "register_commanddesc = 'register new workspace deployment' register_parser = subparsers.add_parser('register', description=register_commanddesc,", "if len(config['local']['wdeployments']) == 0: print('\\t(none)') else: for wdeployment in config['local']['wdeployments']:", "'rerobots/hs-generic' modify_local(config) elif argv_parsed.cprovider_img is not None: config, index, rc", "metavar='CPROVIDER', type=str, dest='cprovider', default=None, help='select a container provider: docker, podman,", "'if there is only 1 workspace deployment')) addon_cmdsh_parser.add_argument('--add', action='store_true', default=False,", 
"findings.append(WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][m])) if output_format == 'json': print(json.dumps(findings)) else: # output_format ==", "to the Internet?') return 1 elif argv_parsed.command == 'addon-cmdsh': if", "default=False, dest='list_rules', help='list all rules') rules_parser.add_argument('--permit-me', action='store_true', default=False, dest='add_rule_permit_me', help='permit", "argv_parsed.help_target_command == 'dissolve': dissolve_parser.print_help() elif argv_parsed.help_target_command == 'status': status_parser.print_help() elif", "add_ssh_path(argv_parsed.new_ssh_path) except: print('ERROR: {} or {} does not exist or", "if argv_parsed.add_addon_vnc: add_vnc(wdeployment_id, tok) elif argv_parsed.rm_addon_vnc: rm_vnc(wdeployment_id, tok) else: print('Use", "it broken?') return 1 if not argv_parsed.only_local_config: # Try to", "argv_parsed.cprovider_img modify_local(config) elif argv_parsed.add_terminate_prog is not None: config, index, rc", "open(local_keys[0], 'rt') as fp: tok = fp.read().strip() if argv_parsed.attach_camera_res: width,", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "config -l` here')) config_parser.add_argument('--add-key', metavar='FILE', dest='new_api_token', help='add new account key')", "if ui_input in ('n', 'no', ''): return 1 try: ac.add_access_rule(wdid,", "in config['local']['wdeployments']: print('{}\\n\\turl: {}\\n\\towner: {}\\n\\tcprovider: {}\\n\\tcargs: {}'.format( wdeployment['id'], wdeployment['url'], wdeployment['owner'],", "ref[jj]['url'] = 'https://rerobots.net/workspace/{}'.format(wdeployment['id']) if output_format == 'json': print(json.dumps(config)) elif output_format", "modify_local(config) else: print('Use `hardshare config` with a switch. 
For example,", "rule explicitly permits it.', ] if output_format == 'json': print(json.dumps(res))", "get list of' ' files with errors, try `--list`')) config_parser.add_argument('-l',", "dest='become_daemon') stop_cameras_commanddesc = 'stop camera streams previously started by attach-camera'", "'[y/N] ').format(wdid), end='') ui_input = input().lower() if ui_input in ('n',", "#!/usr/bin/env python # Copyright (C) 2018 rerobots, Inc. # #", "writing, software # distributed under the License is distributed on", "configuration') config_parser.add_argument('--local', action='store_true', default=False, dest='only_local_config', help='only show local configuration data')", "`--list`')) config_parser.add_argument('-l', '--list', action='store_true', default=False, dest='list_config', help='list configuration') config_parser.add_argument('--local', action='store_true',", "'--list', action='store_true', default=False, dest='list_rules', help='list all rules') rules_parser.add_argument('--permit-me', action='store_true', default=False,", "description=attach_camera_commanddesc, help=attach_camera_commanddesc) attach_camera_parser.add_argument('camera', default=0, type=int, help=('on Linux, 0 typically implies", "ac is None: print('cannot register without initial local configuration.' '", "return 1 try: res = ac.check_registration(argv_parsed.id_prefix) except: print('Error occurred while", "to get a help message, enter\\n\\n hardshare help config') return", "import logging import logging.handlers import os import os.path import subprocess", "to get list of' ' files with errors, try `--list`'))", "print('wrong API token. 
Did it expire?') else: print(res['err']) return 1", "{}'.format(cprovider)) return 1 if not os.path.exists(argv_parsed.raw_device_path): print('ERROR: given device file", "argv_parsed.prune_err_keys: _, errored_keys = list_local_keys(collect_errors=True) for err_key_path, err in errored_keys.items():", "return 0 elif argv_parsed.command is None or argv_parsed.command == 'help':", "== 'addon-mistyproxy': if ac is None: print('cannot register without initial", "nargs='?', default=None, help=('id of workspace deployment to check' ' (can", "'if there is only 1 workspace deployment')) rules_parser.add_argument('-l', '--list', action='store_true',", "= indices elif id_prefix: index = find_wd(config, id_prefix) if index", "vnc', dest='rm_addon_vnc') addon_mistyproxy_commanddesc = 'manage add-on mistyproxy for your workspace", "argv_parsed.id_prefix is None: wdid = None else: try: wdid =", "user account.)')) rules_commanddesc = 'modify access rules (also known as", "print('ERROR: {}'.format(err)) return 1 elif argv_parsed.command == 'ad': if ac", "'no', ''): return 1 try: res = ac.dissolve_registration(wdid) except: print('Error", "Exception as err: print('{}'.format(err)) return 1 if 'err' in res:", "deployment')) rules_parser.add_argument('-l', '--list', action='store_true', default=False, dest='list_rules', help='list all rules') rules_parser.add_argument('--permit-me',", "option)') config_parser.add_argument('--rm-raw-device', metavar='PATH', type=str, dest='remove_raw_device_path', default=None, help='remove device previously marked", "config, index, 0 def main(argv=None): pkglogger = logging.getLogger('hardshare') pkglogger.setLevel(logging.WARNING) loghandler", "> 1: print('ERROR: ambiguous command: more than 1 workspace deployment", "1: print('Width, height must be positive') return 1 else: width,", "print('Width, height must be positive') return 1 else: width, height", "wd['desc'] is not None: print('\\tdesc: {}'.format(wd['desc'])) print('\\torigin (address) of 
registration:", "default=None, help=('image crop configuration; ' 'default: all wdeployments get full", "== 'check': if ac is None: print('no local configuration found.", "config --create`)') return 1 try: print(ac.register_new(at_most_one=argv_parsed.register_at_most_one)) except HSError as err:", "changes' ' (can be unique prefix); ' 'this argument is", "previously marked for inclusion in container') config_parser.add_argument('--add-init-inside', metavar='CMD', type=str, dest='add_init_inside',", "Copyright (C) 2018 rerobots, Inc. # # Licensed under the", "more than 1 wdeployment; ' 'default is to fail if", "= [0] else: indices = [] for idp in id_prefix:", "proxy') config_parser.add_argument('--assign-image', metavar='IMG', type=str, dest='cprovider_img', default=None, help='assign image for cprovider", "type=str, dest='remove_raw_device_path', default=None, help='remove device previously marked for inclusion in", "get_local_config, add_key, add_ssh_path, list_local_keys from .mgmt import find_wd, modify_local, rm_wd", "to fail if local configuration already ' 'has wdeployment declared'))", "None: assert ac is not None ac.declare_existing(argv_parsed.declared_wdeployment_id) ac.sync_config() elif argv_parsed.raw_device_path", "argv_parsed.print_version or argv_parsed.command == 'version': from . 
import __version__ as", "ID does not appear to be valid.') return 1 wdid", "to present in container') config_parser.add_argument('--cprovider', metavar='CPROVIDER', type=str, dest='cprovider', default=None, help='select", "config = { 'local': config, 'remote': remote_config, } if 'local'", "try: add_ssh_path(argv_parsed.new_ssh_path) except: print('ERROR: {} or {} does not exist", "return 1 findings = WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][index]) if 'container' in findings: try:", "rules') rules_parser.add_argument('--permit-me', action='store_true', default=False, dest='add_rule_permit_me', help='permit instantiations by you (the", "--purge.') terminate_parser.add_argument('--purge', action='store_true', default=False, help=help_message_purge, dest='purge_supposed_instance') argv_parsed = argparser.parse_args(argv) if", "return ac.run_sync(config['wdeployments'][index]['id']) elif argv_parsed.command == 'stop-ad': config, index, rc =", "= argparser.add_subparsers(dest='command') subparsers.add_parser('version', help='print version number and exit.') help_parser =", "print('Use `hardshare addon-cmdsh` with a switch.') print('To get a help", "dissolved workspace deployments') config_parser.add_argument('--declare', metavar='ID', dest='declared_wdeployment_id', default=None, help=('declare that workspace", "config['local'] and len(config['local']['err_keys']) > 0: print('found possible keys with errors:')", "ref = config['wdeployments'] for jj, wdeployment in enumerate(ref): ref[jj]['url'] =", "(%(levelname)s) (pid: {});' ' %(asctime)s ; %(message)s' .format(os.getpid()))) pkglogger.addHandler(loghandler) if", "get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc config['wdeployments'][index]['terminate'].remove(argv_parsed.rm_terminate_prog) modify_local(config) else:", "import uuid import yaml from aiohttp.client_exceptions import ClientConnectorError as ConnectionError", "print('Error occurred while contacting remote 
server.') if config['remote']['err'] == 'wrong", "only 1 workspace deployment')) addon_vnc_parser.add_argument('--add', action='store_true', default=False, help='add add-on vnc", "argparser.print_help() else: argparser.print_help() return 0 if argv_parsed.verbose: pkglogger.setLevel(logging.DEBUG) if argv_parsed.output_format", "== 'docker': cp_images = subprocess.run([cprovider, 'image', 'inspect', argv_parsed.cprovider_img], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)", "default=None, help=('declare that workspace deployment is' ' hosted here. (this", "and exit') argparser.add_argument('-V', '--version', action='store_true', default=False, help='print version of hardshare", "else: width, height = None, None if argv_parsed.attach_camera_crop_config: crop =", "for current instance to finish' terminate_parser = subparsers.add_parser('stop-ad', description=terminate_commanddesc, help=terminate_commanddesc)", "err: print('ERROR: {}'.format(err)) return 1 elif argv_parsed.command == 'addon-vnc': if", "{}: {}'.format(err, err_key_path)) if config['remote']: if 'err' in config['remote']: print('Error", "= fp.read().strip() try: if argv_parsed.add_addon_cmdsh: add_cmdsh(wdeployment_id, tok) elif argv_parsed.rm_addon_cmdsh: rm_cmdsh(wdeployment_id,", "config_parser.add_argument('--assign-image', metavar='IMG', type=str, dest='cprovider_img', default=None, help='assign image for cprovider to", "id_prefix) if index is None: print('ERROR: given prefix does not", "valid.') return 1 wdid = config['wdeployments'][index]['id'] if argv_parsed.list_rules: try: res", "in ['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside'] =", "'--all', action='store_true', default=False, help=('stop all attached cameras associated with this", "argparse.ArgumentParser(description=('Command-line interface' ' for the hardshare client'), add_help=False) argparser.add_argument('-h', '--help',", 
"description=advertise_commanddesc, help=advertise_commanddesc) advertise_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment", "argv_parsed.command == 'ad': if ac is None: print('cannot register without", "dest='create_config', help='if no local configuration is found, then create one')", "len(config['remote']['deployments']) == 0: print('\\nno registered workspace deployments with this user", "'proxy' print('ERROR: --assign-image not supported for cprovider `proxy`') return 1", "argv_parsed.command == 'addon-mistyproxy': if ac is None: print('cannot register without", "get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc normalized_path = os.path.abspath(argv_parsed.add_terminate_prog)", "`hardshare' ' terminate` without --purge.') terminate_parser.add_argument('--purge', action='store_true', default=False, help=help_message_purge, dest='purge_supposed_instance')", "= fp.read().strip() try: if argv_parsed.add_addon_vnc: add_vnc(wdeployment_id, tok) elif argv_parsed.rm_addon_vnc: rm_vnc(wdeployment_id,", "if config['remote']['err'] == 'wrong authorization token': print('wrong API token. 
Did", "0: return rc carg = '--device={D}:{D}'.format(D=argv_parsed.remove_raw_device_path) config['wdeployments'][index]['cargs'].remove(carg) modify_local(config) elif argv_parsed.add_init_inside", "argv_parsed.help_target_command == 'stop-ad': terminate_parser.print_help() else: argparser.print_help() else: argparser.print_help() return 0", "tok = fp.read().strip() try: if argv_parsed.add_addon_vnc: add_vnc(wdeployment_id, tok) elif argv_parsed.rm_addon_vnc:", "metavar='CROPCONFIG', type=str, dest='attach_camera_crop_config', default=None, help=('image crop configuration; ' 'default: all", "1 else: if ac is None: print('cannot terminate without valid", "not exist') return 1 carg = '--device={D}:{D}'.format(D=argv_parsed.raw_device_path) config['wdeployments'][index]['cargs'].append(carg) modify_local(config) elif", "# Copyright (C) 2018 rerobots, Inc. # # Licensed under", "os.close(0) os.close(1) os.close(2) else: pkglogger.addHandler(logging.StreamHandler()) logfname = 'hardshare_client.{}.log'.format(config['wdeployments'][index]['id']) loghandler =", "else: if 'local' not in config: config = { 'local':", "import find_wd, modify_local, rm_wd from .api import HSAPIClient from .err", "# limitations under the License. 
\"\"\"Command-line interface \"\"\" import argparse", "new account key') config_parser.add_argument('--add-ssh-path', metavar='PATH', dest='new_ssh_path', help='add path to SSH", "1 workspace deployment')) attach_camera_parser.add_argument('--width-height', metavar='W,H', type=str, dest='attach_camera_res', default=None, help=('width and", "is it broken?') return 1 if not argv_parsed.only_local_config: # Try", "rules_parser.add_argument('--permit-me', action='store_true', default=False, dest='add_rule_permit_me', help='permit instantiations by you (the owner)')", "None or argv_parsed.command == 'help': if hasattr(argv_parsed, 'help_target_command') and argv_parsed.help_target_command", "return 1 elif argv_parsed.create_config: get_local_config(create_if_empty=True) elif argv_parsed.declared_wdeployment_id is not None:", "= selected_cprovider if selected_cprovider == 'proxy': config['wdeployments'][index]['image'] = None else:", "help=('id of workspace deployment for configuration changes' ' (can be", "{}'.format(err)) return 1 except: print('Error occurred while contacting rerobots servers')", "not argv_parsed.only_local_config: # Try to get remote config, given possibly", "= config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--add-init-inside not supported for", "account.)')) rules_commanddesc = 'modify access rules (also known as capabilities", "get a help message, enter\\n\\n hardshare help addon-mistyproxy') return 1", "addon_cmdsh_parser = subparsers.add_parser('addon-cmdsh', description=addon_cmdsh_commanddesc, help=addon_cmdsh_commanddesc) addon_cmdsh_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id", "= 'check registration of this workspace deployment' check_parser = subparsers.add_parser('check',", "API token. 
Did it expire?') else: print(res['err']) return 1 res['comments']", "None else: try: wdid = str(uuid.UUID(argv_parsed.id_prefix)) except: config, index, rc", "modify_local(config) elif argv_parsed.rm_init_inside: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc", "import add_cmdsh, rm_cmdsh, add_vnc, rm_vnc, add_mistyproxy, rm_mistyproxy def get_config_with_index(id_prefix=None): try:", "'check': if ac is None: print('no local configuration found. (try", "'dissolve': if ac is None: print('no local configuration found. (try", "tok = fp.read().strip() if argv_parsed.attach_camera_res: width, height = [int(x) for", "return 1 if 'err' in res: if res['err'] == 'wrong", "None, 1 index = [0] else: indices = [] for", "{}'.format(err)) return 1 elif argv_parsed.command == 'addon-mistyproxy': if ac is", "'default is to fail if local configuration already ' 'has", "'local': config, 'remote': None, } print('workspace deployments defined in local", "the following: docker, podman, proxy') return 1 config, index, rc", "return 1 try: wdid = str(uuid.UUID(argv_parsed.wdid)) except: print('The given ID", "height = [int(x) for x in argv_parsed.attach_camera_res.split(',')] if width <", "1 if argv_parsed.id_prefix is None: wdid = None else: try:", "except ConnectionError: print('ERROR: failed to reach server. Are you connected", "ambiguous command: more than 1 workspace deployment defined.') return config,", "help='permit instantiations by you (the owner)') rules_parser.add_argument('--drop-all', action='store_true', default=False, dest='drop_all_rules',", "to you (the owner)')) rules_parser.add_argument('--permit-all', action='store_true', default=False, dest='add_rule_permit_all', help='permit instantiations", "1 add_mistyproxy(wdeployment_id, tok, argv_parsed.targetaddr) elif argv_parsed.rm_addon_mistyproxy: rm_mistyproxy(wdeployment_id, tok) else: print('Use", "Inc. 
# # Licensed under the Apache License, Version 2.0", "default=None, help='add program to list of commands to execute') config_parser.add_argument('--rm-terminate-prog',", "does not appear to be valid.') return 1 wdid =", "in wdeployment['terminate']: print('\\t\\t{}'.format(terminate_p)) print('\\nfound keys:') if len(config['local']['keys']) == 0: print('\\t(none)')", "elif argv_parsed.cprovider is not None: selected_cprovider = argv_parsed.cprovider.lower() if selected_cprovider", "terminate_parser.add_argument('-f', '--force', action='store_true', default=False, help=('if there is an active instance,", "of commands for inside initialization') config_parser.add_argument('-p', '--prune', action='store_true', default=False, dest='prune_err_keys',", "`hardshare status`)') return 1 return 0 elif argv_parsed.command == 'register':", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "argv_parsed.raw_device_path is not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if", "not exist') return 1 config['wdeployments'][index]['terminate'].append(normalized_path) modify_local(config) elif argv_parsed.rm_terminate_prog is not", "not os.path.exists(normalized_path): print('ERROR: given path does not exist') return 1", "is an active instance, then' ' stop it without waiting'),", "the key)') config_parser.add_argument('--add-raw-device', metavar='PATH', type=str, dest='raw_device_path', default=None, help='add device file", "--create`)') return 1 config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc", "str(uuid.UUID(argv_parsed.wdid)) except: print('The given ID does not appear to be", "return rc cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--add-raw-device", "if res['err'] == 'not found': print('not found: workspace deployment with", "token. 
Did it expire?') else: print(res['err']) return 1 # Remove", "return 1 # Remove from local configuration, if present rm_wd(get_local_config(),", "clean-up; this' ' command is a last resort. First, try", "by cprovider') return 1 else: # cprovider == 'proxy' print('ERROR:", "sys import uuid import yaml from aiohttp.client_exceptions import ClientConnectorError as", "'default: all wdeployments get full images')) attach_camera_parser.add_argument('-d', '--daemon', action='store_true', default=False,", "default=None, help='assign image for cprovider to use (advanced option)') config_parser.add_argument('--rm-raw-device',", "= logging.FileHandler(filename=logfname, mode='a', delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s) (pid: {});' '", "rc != 0: return rc normalized_path = os.path.abspath(argv_parsed.add_terminate_prog) if not", "config, indices, rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: return", "[ 'Access is denied unless a rule explicitly permits it.',", "cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--add-raw-device not supported", "import sys import uuid import yaml from aiohttp.client_exceptions import ClientConnectorError", "add_key, add_ssh_path, list_local_keys from .mgmt import find_wd, modify_local, rm_wd from", "run as daemon)', dest='become_daemon') stop_cameras_commanddesc = 'stop camera streams previously", "open(local_keys[0], 'rt') as fp: tok = fp.read().strip() try: if argv_parsed.add_addon_mistyproxy:", "== 'register': register_parser.print_help() elif argv_parsed.help_target_command == 'check': check_parser.print_help() elif argv_parsed.help_target_command", "attach_camera_parser.add_argument('--width-height', metavar='W,H', type=str, dest='attach_camera_res', default=None, help=('width and height of captured", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "WorkspaceInstance from .mgmt 
import get_local_config, add_key, add_ssh_path, list_local_keys from .mgmt", "' 'note that access is denied by default, ' 'including", "print('Error occurred while contacting remote server ' 'at {}'.format(ac.base_uri)) return", "not appear to be valid.') return 1 ui_input = None", "it is otherwise in a' ' non-recoverable state, then mark", "VNC via rerobots.net', dest='add_addon_vnc') addon_vnc_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on vnc',", "argv_parsed.help_target_command == 'addon-mistyproxy': addon_mistyproxy_parser.print_help() elif argv_parsed.help_target_command == 'ad': advertise_parser.print_help() elif", "argv_parsed.help_target_command == 'ad': advertise_parser.print_help() elif argv_parsed.help_target_command == 'stop-ad': terminate_parser.print_help() else:", "rc wdeployment_id = config['wdeployments'][index]['id'] local_keys = list_local_keys() if len(local_keys) <", "with this user account:') for wd in config['remote']['deployments']: print('{}'.format(wd['id'])) print('\\tcreated:", "ac.sync_config() elif argv_parsed.raw_device_path is not None: config, index, rc =", "started on this host'), dest='all_cameras') addon_cmdsh_commanddesc = 'manage add-on cmdsh", "return 1 wdid = config['wdeployments'][index]['id'] if argv_parsed.list_rules: try: res =", "err_key_path, err in errored_keys.items(): print('deleting {}...'.format(err_key_path)) os.unlink(err_key_path) elif argv_parsed.new_api_token: try:", "actions by the hardshare client', dest='verbose') argparser.add_argument('--format', metavar='FORMAT', default=None, type=str,", "previously registered under' ' the same user account.)')) rules_commanddesc =", "or argv_parsed.command == 'help': if hasattr(argv_parsed, 'help_target_command') and argv_parsed.help_target_command is", "carg = '--device={D}:{D}'.format(D=argv_parsed.remove_raw_device_path) config['wdeployments'][index]['cargs'].remove(carg) modify_local(config) elif argv_parsed.add_init_inside is 
not None:", "return 1 res['comments'] = [ 'Access is denied unless a", "len(config['local']['err_keys']) > 0: print('found possible keys with errors:') for err_key_path,", "None while ui_input not in ('y', 'yes'): print('Do you want", "os.close(1) os.close(2) try: camera_main(wdeployments, tok=tok, dev=argv_parsed.camera, width=width, height=height, crop=crop) except", "if argv_parsed.list_config: try: config = get_local_config(create_if_empty=argv_parsed.create_config, collect_errors=True) except: print('error loading", "type=str, help=('special output formatting (default is no special formatting); '", "help='IP address of the Misty robot', dest='targetaddr') addon_mistyproxy_parser.add_argument('--rm', action='store_true', default=False,", "for wd in config['wdeployments']: findings.append(WorkspaceInstance.inspect_instance(wdeployment=wd)) else: findings = [] for", "nargs='?', default=None, help=('id of workspace deployment for configuration changes' '", "only 1 workspace deployment')) attach_camera_parser.add_argument('--width-height', metavar='W,H', type=str, dest='attach_camera_res', default=None, help=('width", "try: stop_cameras(tok, allcam=argv_parsed.all_cameras) except ConnectionError: print('ERROR: failed to reach server.", "0: print('ERROR: given image name is not recognized by cprovider')", "metavar='ID', nargs='?', default=None, help=('id of workspace deployment for configuration changes'", "then create one') config_parser.add_argument('--add-terminate-prog', metavar='PATH', dest='add_terminate_prog', default=None, help='add program to", "to list of commands to execute') config_parser.add_argument('--rm-terminate-prog', metavar='PATH', dest='rm_terminate_prog', default=None,", "configuration changes' ' (can be unique prefix); ' 'this argument", "works if it' ' has been previously registered under' '", "Check: `hardshare config -l`') return 1 with open(local_keys[0], 'rt') as", "is not recognized by cprovider') return 1 else: # cprovider", "key') 
return 1 elif argv_parsed.new_ssh_path: try: add_ssh_path(argv_parsed.new_ssh_path) except: print('ERROR: {}", "on which to attach' ' (can be unique prefix); '", "specific language governing permissions and # limitations under the License.", "1 elif argv_parsed.add_rule_permit_all: ui_input = None while ui_input not in", "for jj in indices] local_keys = list_local_keys() if len(local_keys) <", "of registration: {}' .format(wd['origin'])) if wd['dissolved']: print('\\tdissolved: {}'.format(wd['dissolved'])) elif argv_parsed.prune_err_keys:", "(can be unique prefix); ' 'this argument is not required", "state, then mark it remotely as' ' terminated and attempt", "height < 1: print('Width, height must be positive') return 1", "jj in indices] local_keys = list_local_keys() if len(local_keys) < 1:", "rc != 0: return rc if 'ssh_key' not in config", "err: print('ERROR: {}'.format(err)) return 1 elif argv_parsed.command == 'ad': if", "' '[y/N] ').format(wdid), end='') ui_input = input().lower() if ui_input in", "help='if no local configuration is found, then create one') config_parser.add_argument('--add-terminate-prog',", "invoking terminal (i.e., run as daemon)', dest='become_daemon') stop_cameras_commanddesc = 'stop", "config -l --local to only get local information') return 1", "authorization token': print('wrong API token. Did it expire?') else: print(config['remote']['err'])", "return 1 config['wdeployments'][index]['terminate'].append(normalized_path) modify_local(config) elif argv_parsed.rm_terminate_prog is not None: config,", "try: if argv_parsed.add_addon_cmdsh: add_cmdsh(wdeployment_id, tok) elif argv_parsed.rm_addon_cmdsh: rm_cmdsh(wdeployment_id, tok) else:", "connected to the Internet?') return 1 elif argv_parsed.command == 'addon-cmdsh':", "register without initial local configuration.' 
' (try `hardshare config --create`)')", "config -l`') print('or to get a help message, enter\\n\\n hardshare", "you only have one camera, then try 0')) attach_camera_parser.add_argument('id_prefix', metavar='ID',", "format unrecognized: {}'.format(argv_parsed.output_format)) return 1 else: output_format = None try:", "indicates that an instance is active,' ' but there is", "= 'mark as unavailable; optionally wait for current instance to", "wdid = None else: try: wdid = str(uuid.UUID(argv_parsed.id_prefix)) except: config,", "err_key_path, err in config['local']['err_keys'].items(): print('\\t {}: {}'.format(err, err_key_path)) if config['remote']:", "help=('id of target workspace deployment' ' (can be unique prefix);", "fp.read().strip() try: stop_cameras(tok, allcam=argv_parsed.all_cameras) except ConnectionError: print('ERROR: failed to reach", "isinstance(id_prefix, list): if len(id_prefix) == 0: if len(config['wdeployments']) > 1:", "undone. ' '[y/N] ').format(wdid), end='') ui_input = input().lower() if ui_input", "for wd in config['remote']['deployments']: print('{}'.format(wd['id'])) print('\\tcreated: {}'.format(wd['date_created'])) if wd['desc'] is", "in ['docker', 'podman', 'proxy']: print('unknown cprovider: {}'.format(cprovider)) return 1 if", "is not recognized by cprovider') return 1 elif cprovider ==", "HSAPIClient from .err import Error as HSError from .addons import", "local configuration:') if len(config['local']['wdeployments']) == 0: print('\\t(none)') else: for wdeployment", "default, ' 'including to you (the owner)')) rules_parser.add_argument('--permit-all', action='store_true', default=False,", "stream to workspace deployments' attach_camera_parser = subparsers.add_parser('attach-camera', description=attach_camera_commanddesc, help=attach_camera_commanddesc) attach_camera_parser.add_argument('camera',", "' 'default is to fail if local configuration already '", "# you may not use this file except in compliance", "workspace deployment for 
configuration changes' ' (can be unique prefix);", "of the Misty robot', dest='targetaddr') addon_mistyproxy_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on", "given device file does not exist') return 1 carg =", "sys.argv[1:] argparser = argparse.ArgumentParser(description=('Command-line interface' ' for the hardshare client'),", "help message, enter\\n\\n hardshare help config') return 1 return 0", "if isinstance(id_prefix, list): if len(id_prefix) == 0: if len(config['wdeployments']) >", "return 1 if argv_parsed.id_prefix is None: if len(config['wdeployments']) == 0:", "== 'addon-vnc': addon_vnc_parser.print_help() elif argv_parsed.help_target_command == 'addon-mistyproxy': addon_mistyproxy_parser.print_help() elif argv_parsed.help_target_command", "config['local']['wdeployments']: print('{}\\n\\turl: {}\\n\\towner: {}\\n\\tcprovider: {}\\n\\tcargs: {}'.format( wdeployment['id'], wdeployment['url'], wdeployment['owner'], wdeployment['cprovider'],", "try: res = ac.check_registration(argv_parsed.id_prefix) except: print('Error occurred while contacting remote", "action='store_true', default=False, help='remove add-on mistyproxy', dest='rm_addon_mistyproxy') terminate_commanddesc = 'mark as", "'check registration of this workspace deployment' check_parser = subparsers.add_parser('check', description=check_commanddesc,", "`hardshare config -h`)') return 1 try: res = ac.check_registration(argv_parsed.id_prefix) except:", "action='store_true', default=False, dest='rm_init_inside', help='remove (empty) list of commands for inside", "1 elif cprovider not in ['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider))", "this host'), dest='all_cameras') addon_cmdsh_commanddesc = 'manage add-on cmdsh for your", "help message, enter\\n\\n hardshare help rules') return 1 elif argv_parsed.command", "'https://rerobots.net/workspace/{}'.format(wdeployment['id']) if output_format == 'json': print(json.dumps(config)) elif 
output_format == 'yaml':", "None: print('cannot register without initial local configuration.' ' (try `hardshare", "-h`)') return 1 if argv_parsed.id_prefix is None: wdid = None", "dest='add_init_inside', default=None, help='add command to be executed inside container') config_parser.add_argument('--rm-init-inside',", "print(yaml.dump(config, default_flow_style=False)) else: if 'local' not in config: config =", "registration: {}'.format(res['origin'])) if 'date_dissolved' in res: print('\\tdissolved: {}'.format(res['date_dissolved'])) elif argv_parsed.command", "wrong permissions.'.format( argv_parsed.new_ssh_path, argv_parsed.new_ssh_path + '.pub' )) return 1 elif", "podman} if config['wdeployments'][index]['image'] is None: config['wdeployments'][index]['image'] = 'rerobots/hs-generic' modify_local(config) elif", "== 'addon-cmdsh': if ac is None: print('cannot register without initial", "client'), add_help=False) argparser.add_argument('-h', '--help', dest='print_help', action='store_true', default=False, help='print this help", "not in ('y', 'yes'): print(('Do you want to dissolve {}?", "'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 findings = WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][index]) if", "'config': if argv_parsed.list_config: try: config = get_local_config(create_if_empty=argv_parsed.create_config, collect_errors=True) except: print('error", "is a last resort. 
First, try `hardshare' ' terminate` without", "servers') print('Try config -l --local to only get local information')", "deployments' addon_mistyproxy_parser = subparsers.add_parser('addon-mistyproxy', description=addon_mistyproxy_commanddesc, help=addon_mistyproxy_commanddesc) addon_mistyproxy_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None,", "action='store_true', default=False, dest='list_rules', help='list all rules') rules_parser.add_argument('--permit-me', action='store_true', default=False, dest='add_rule_permit_me',", "if len(config['wdeployments']) == 0: findings = [WorkspaceInstance.inspect_instance()] else: findings =", "not in ('y', 'yes'): print('Do you want to permit access", "addon_vnc_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on vnc', dest='rm_addon_vnc') addon_mistyproxy_commanddesc = 'manage", "print('\\nfound keys:') if len(config['local']['keys']) == 0: print('\\t(none)') else: print('\\t' +", "'--device={D}:{D}'.format(D=argv_parsed.raw_device_path) config['wdeployments'][index]['cargs'].append(carg) modify_local(config) elif argv_parsed.remove_raw_device_path is not None: config, index,", "exist?') return 1 if argv_parsed.id_prefix is None: if len(config['wdeployments']) ==", "'inspect', argv_parsed.cprovider_img], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) if cp_images.returncode != 0: print('ERROR: given", "in ['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside'].append(argv_parsed.add_init_inside) modify_local(config)", "'manage local and remote configuration' config_parser = subparsers.add_parser('config', description=config_commanddesc, help=config_commanddesc)", "without waiting'), dest='force_terminate') help_message_purge = ('if the server indicates that", "for m in find_wd(config, argv_parsed.id_prefix, one_or_none=False): 
findings.append(WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][m])) if output_format ==", "in container') config_parser.add_argument('--cprovider', metavar='CPROVIDER', type=str, dest='cprovider', default=None, help='select a container", "action='store_true', default=False, help='add add-on vnc to enable VNC via rerobots.net',", "argv_parsed.new_ssh_path: try: add_ssh_path(argv_parsed.new_ssh_path) except: print('ERROR: {} or {} does not", "if cp_images.returncode != 0: print('ERROR: given image name is not", "'status': status_parser.print_help() elif argv_parsed.help_target_command == 'attach-camera': attach_camera_parser.print_help() elif argv_parsed.help_target_command ==", "status_parser.print_help() elif argv_parsed.help_target_command == 'attach-camera': attach_camera_parser.print_help() elif argv_parsed.help_target_command == 'stop-cameras':", "-l` here')) config_parser.add_argument('--add-key', metavar='FILE', dest='new_api_token', help='add new account key') config_parser.add_argument('--add-ssh-path',", "is None: print('cannot register without initial local configuration.' 
' (try", "else: print(res['err']) return 1 else: print('summary of workspace deployment {}'.format(res['id']))", "'rt') as fp: tok = fp.read().strip() try: stop_cameras(tok, allcam=argv_parsed.all_cameras) except", "under the Apache License, Version 2.0 (the \"License\"); # you", "'--daemon', action='store_true', default=False, help='detach from invoking terminal (i.e., run as", "client') return 1 try: ac.terminate(config['wdeployments'][index]['id']) except FileNotFoundError: print('ERROR: cannot reach", "provider: docker, podman, proxy') config_parser.add_argument('--assign-image', metavar='IMG', type=str, dest='cprovider_img', default=None, help='assign", "list of commands to execute') config_parser.add_argument('--rm-terminate-prog', metavar='PATH', dest='rm_terminate_prog', default=None, help=('remove", "' 'for example, ' 'copy-and-paste value shown in `hardshare config", "config -h`)') return 1 try: res = ac.check_registration(argv_parsed.id_prefix) except: print('Error", "ui_input = None while ui_input not in ('y', 'yes'): print(('Do", "example, ' 'copy-and-paste value shown in `hardshare config -l` here'))", "default=False, help=('if there is an active instance, then' ' stop", "findings = [] for m in find_wd(config, argv_parsed.id_prefix, one_or_none=False): findings.append(WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][m]))", "description=config_commanddesc, help=config_commanddesc) config_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment", "daemon)', dest='become_daemon') stop_cameras_commanddesc = 'stop camera streams previously started by", "err: print('{}'.format(err)) return 1 elif argv_parsed.add_rule_permit_all: ui_input = None while", "'yes'): print('Do you want to permit access by anyone? 
[y/N]", "elif argv_parsed.help_target_command == 'addon-mistyproxy': addon_mistyproxy_parser.print_help() elif argv_parsed.help_target_command == 'ad': advertise_parser.print_help()", "to reach server. Are you connected to the Internet?') return", "return 1 ui_input = None while ui_input not in ('y',", "help='permit instantiations by anyone') register_commanddesc = 'register new workspace deployment'", "wdeployment['terminate']: print('\\t\\t{}'.format(terminate_p)) print('\\nfound keys:') if len(config['local']['keys']) == 0: print('\\t(none)') else:", "metavar='FORMAT', default=None, type=str, help=('special output formatting (default is no special", "if rc != 0: return rc if argv_parsed.purge_supposed_instance: cprovider =", "local configuration found. (try `hardshare config -h`)') return 1 if", "print('ERROR: --assign-image not supported for cprovider `proxy`') return 1 config['wdeployments'][index]['image']", "get remote config, given possibly new local config try: assert", "= get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc wdeployment_id =", "account') else: print('\\nregistered workspace deployments with this user account:') for", "'dissolve': dissolve_parser.print_help() elif argv_parsed.help_target_command == 'status': status_parser.print_help() elif argv_parsed.help_target_command ==", "['yaml', 'json']: print('output format unrecognized: {}'.format(argv_parsed.output_format)) return 1 else: output_format", "try `--list`')) config_parser.add_argument('-l', '--list', action='store_true', default=False, dest='list_config', help='list configuration') config_parser.add_argument('--local',", "reach daemon. Does it exist? 
(Try `hardshare status`)') return 1", "(can be unique prefix)')) terminate_parser.add_argument('-f', '--force', action='store_true', default=False, help=('if there", "subparsers.add_parser('config', description=config_commanddesc, help=config_commanddesc) config_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace", "not None: print('\\tdesc: {}'.format(wd['desc'])) print('\\torigin (address) of registration: {}' .format(wd['origin']))", "help='remove add-on mistyproxy', dest='rm_addon_mistyproxy') terminate_commanddesc = 'mark as unavailable; optionally", "given ID does not appear to be valid.') return 1", "to get a help message, enter\\n\\n hardshare help rules') return", "hardshare help addon-vnc') return 1 except ValueError as err: print('ERROR:", "len(config['wdeployments']) == 0: findings = [WorkspaceInstance.inspect_instance()] else: findings = []", "action='store_true', default=False, help='print version of hardshare (this) package.', dest='print_version') argparser.add_argument('-v',", "hardshare client', dest='verbose') argparser.add_argument('--format', metavar='FORMAT', default=None, type=str, help=('special output formatting", "print('The given ID does not appear to be valid.') return", "' 'at {}'.format(ac.base_uri)) return 1 if 'err' in res: if", "without --purge.') terminate_parser.add_argument('--purge', action='store_true', default=False, help=help_message_purge, dest='purge_supposed_instance') argv_parsed = argparser.parse_args(argv)", "exist') return 1 carg = '--device={D}:{D}'.format(D=argv_parsed.raw_device_path) config['wdeployments'][index]['cargs'].append(carg) modify_local(config) elif argv_parsed.remove_raw_device_path", "is otherwise in a' ' non-recoverable state, then mark it", "as fp: tok = fp.read().strip() if argv_parsed.attach_camera_res: width, height =", "cprovider') return 1 else: # cprovider == 'proxy' print('ERROR: --assign-image", "is only 1 workspace deployment')) 
addon_vnc_parser.add_argument('--add', action='store_true', default=False, help='add add-on", "== 'proxy': print('--add-raw-device not supported for cprovider `proxy`') return 1", "== 'json': print(json.dumps(config)) elif output_format == 'yaml': print(yaml.dump(config, default_flow_style=False)) else:", "= config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--purge not supported for", "dest='rm_addon_mistyproxy') terminate_commanddesc = 'mark as unavailable; optionally wait for current", "0: print('\\nno registered workspace deployments with this user account') else:", "status`)') return 1 return 0 elif argv_parsed.command == 'register': if", "== 0: print(('ERROR: no workspace deployment in local configuration.')) return", "from .mgmt import find_wd, modify_local, rm_wd from .api import HSAPIClient", "is None: print('ERROR: given prefix does not match precisely 1", "' 'has the wrong permissions.'.format( argv_parsed.new_ssh_path, argv_parsed.new_ssh_path + '.pub' ))", "config, None, 1 indices.append(index) index = indices elif id_prefix: index", "= get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc config['wdeployments'][index]['terminate'].remove(argv_parsed.rm_terminate_prog) modify_local(config)", "there is only 1 workspace deployment')) attach_camera_parser.add_argument('--width-height', metavar='W,H', type=str, dest='attach_camera_res',", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "be valid.') return 1 wdid = config['wdeployments'][index]['id'] if argv_parsed.list_rules: try:", "'yaml': print(yaml.dump(config, default_flow_style=False)) else: if 'local' not in config: config", "'json']: print('output format unrecognized: {}'.format(argv_parsed.output_format)) return 1 else: output_format =", "of target workspace deployment' ' (can be unique prefix)')) advertise_commanddesc", "if wd['desc'] is not None: print('\\tdesc: {}'.format(wd['desc'])) print('\\torigin (address) of", "argv_parsed.command == 
'stop-cameras': local_keys = list_local_keys() if len(local_keys) < 1:", "rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc normalized_path", "'rules': rules_parser.print_help() elif argv_parsed.help_target_command == 'register': register_parser.print_help() elif argv_parsed.help_target_command ==", "ClientConnectorError as ConnectionError from .core import WorkspaceInstance from .mgmt import", "action='store_true', default=False, help=help_message_purge, dest='purge_supposed_instance') argv_parsed = argparser.parse_args(argv) if argv_parsed.print_version or", "('y', 'yes'): print(('Do you want to dissolve {}? This action", "len(local_keys) < 1: print('No valid keys available. Check: `hardshare config", "a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 #", "not required ' 'if there is only 1 workspace deployment'))", "id_prefix: index = find_wd(config, id_prefix) if index is None: print('ERROR:", "'default depends on the supporting drivers')) attach_camera_parser.add_argument('--crop', metavar='CROPCONFIG', type=str, dest='attach_camera_crop_config',", "to permit access by anyone? [y/N] ', end='') ui_input =", "= 'register new workspace deployment' register_parser = subparsers.add_parser('register', description=register_commanddesc, help=register_commanddesc)", "0: return rc normalized_path = os.path.abspath(argv_parsed.add_terminate_prog) if not os.path.exists(normalized_path): print('ERROR:", "last resort. 
First, try `hardshare' ' terminate` without --purge.') terminate_parser.add_argument('--purge',", "in a' ' non-recoverable state, then mark it remotely as'", "of this workspace deployment' check_parser = subparsers.add_parser('check', description=check_commanddesc, help=check_commanddesc) check_parser.add_argument('id_prefix',", "import ClientConnectorError as ConnectionError from .core import WorkspaceInstance from .mgmt", "1 wdid = config['wdeployments'][index]['id'] if argv_parsed.list_rules: try: res = ac.get_access_rules(wdid)", "given possibly new local config try: assert ac is not", "ui_input in ('n', 'no', ''): return 1 try: ac.add_access_rule(wdid, to_user='*')", "required ' 'if there is only 1 workspace deployment')) advertise_parser.add_argument('-d',", "addon_mistyproxy_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment' ' (can", "fail if local configuration already ' 'has wdeployment declared')) check_commanddesc", "deployment to check' ' (can be unique prefix)')) dissolve_commanddesc =", "remote_config, } if 'local' in config: ref = config['local']['wdeployments'] else:", "' (THIS CANNOT BE UNDONE)') dissolve_parser = subparsers.add_parser('dissolve', description=dissolve_commanddesc, help=dissolve_commanddesc)", "!= 0: return rc wdeployments = [config['wdeployments'][jj]['id'] for jj in", "argv_parsed.list_config: try: config = get_local_config(create_if_empty=argv_parsed.create_config, collect_errors=True) except: print('error loading configuration", "in `hardshare config -l` here')) config_parser.add_argument('--add-key', metavar='FILE', dest='new_api_token', help='add new", "deployment {}'.format(res['id'])) print('\\tcreated: {}'.format(res['date_created'])) print('\\torigin (address) of registration: {}'.format(res['origin'])) if", "not valid; to get list of' ' files with errors,", "except Exception as err: print('{}'.format(err)) return 1 elif argv_parsed.add_rule_permit_all: ui_input", 
"config = { 'local': config, 'remote': None, } print('workspace deployments", "1 config['wdeployments'][index]['init_inside'].append(argv_parsed.add_init_inside) modify_local(config) elif argv_parsed.rm_init_inside: config, index, rc = get_config_with_index(argv_parsed.id_prefix)", "1 try: res = ac.check_registration(argv_parsed.id_prefix) except: print('Error occurred while contacting", "'register': register_parser.print_help() elif argv_parsed.help_target_command == 'check': check_parser.print_help() elif argv_parsed.help_target_command ==", "in ['yaml', 'json']: print('output format unrecognized: {}'.format(argv_parsed.output_format)) return 1 else:", "{}'.format(wd['date_created'])) if wd['desc'] is not None: print('\\tdesc: {}'.format(wd['desc'])) print('\\torigin (address)", "%(message)s' .format(os.getpid()))) pkglogger.addHandler(loghandler) if argv is None: argv = sys.argv[1:]", "of more than 1 wdeployment; ' 'default is to fail", "not None: if argv_parsed.help_target_command == 'config': config_parser.print_help() elif argv_parsed.help_target_command ==", "' 'options: YAML , JSON'), dest='output_format') subparsers = argparser.add_subparsers(dest='command') subparsers.add_parser('version',", "None: selected_cprovider = argv_parsed.cprovider.lower() if selected_cprovider not in ['docker', 'podman',", "'err_keys' in config['local'] and len(config['local']['err_keys']) > 0: print('found possible keys", "not in config: config = { 'local': config, 'remote': None,", "defined.') return config, None, 1 index = 0 return config,", "fp: tok = fp.read().strip() try: if argv_parsed.add_addon_vnc: add_vnc(wdeployment_id, tok) elif", "if argv_parsed.purge_supposed_instance: cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--purge", "workspace deployment to dissolve') status_commanddesc = 'get status of local", "modify_local(config) elif argv_parsed.add_init_inside is not None: config, index, rc =", "{}'.format( wdeployment['id'], 
wdeployment['url'], wdeployment['owner'], wdeployment['cprovider'], wdeployment['cargs'], )) if wdeployment['cprovider'] in", "{}'.format(res['date_dissolved'])) elif argv_parsed.command == 'dissolve': if ac is None: print('no", "help=stop_cameras_commanddesc) stop_cameras_parser.add_argument('-a', '--all', action='store_true', default=False, help=('stop all attached cameras associated", "local clean-up; this' ' command is a last resort. First,", "Apache License, Version 2.0 (the \"License\"); # you may not", "it.', ] if output_format == 'json': print(json.dumps(res)) else: # output_format", "either express or implied. # See the License for the", "License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "type=str, dest='cprovider', default=None, help='select a container provider: docker, podman, proxy')", "with a switch. For example, `hardshare config -l`') print('or to", "argv_parsed.command == 'rules': if ac is None: print('no local configuration", "' unavailable for any future use' ' (THIS CANNOT BE", "streams previously started by attach-camera' stop_cameras_parser = subparsers.add_parser('stop-cameras', description=stop_cameras_commanddesc, help=stop_cameras_commanddesc)", "is' ' hosted here. 
(this only works if it' '", "argv_parsed.attach_camera_crop_config: crop = json.loads(argv_parsed.attach_camera_crop_config) else: crop = None if argv_parsed.become_daemon:", "= config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--add-raw-device not supported for", "None ac.declare_existing(argv_parsed.declared_wdeployment_id) ac.sync_config() elif argv_parsed.raw_device_path is not None: config, index,", "this user account:') for wd in config['remote']['deployments']: print('{}'.format(wd['id'])) print('\\tcreated: {}'.format(wd['date_created']))", "is not None: assert ac is not None ac.declare_existing(argv_parsed.declared_wdeployment_id) ac.sync_config()", "help='add device file to present in container') config_parser.add_argument('--cprovider', metavar='CPROVIDER', type=str,", "argv_parsed.drop_all_rules or argv_parsed.add_rule_permit_me: try: if argv_parsed.drop_all_rules: ac.drop_access_rules(wdid) elif argv_parsed.add_rule_permit_me: ac.add_access_rule(wdid)", "argparser.parse_args(argv) if argv_parsed.print_version or argv_parsed.command == 'version': from . 
import", "mistyproxy to allow HTTP proxy to Misty robots', dest='add_addon_mistyproxy') addon_mistyproxy_parser.add_argument('--ip',", "description=rules_commanddesc, help=rules_commanddesc) rules_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of target workspace", "err in errored_keys.items(): print('deleting {}...'.format(err_key_path)) os.unlink(err_key_path) elif argv_parsed.new_api_token: try: add_key(argv_parsed.new_api_token)", "{}\\n\\tcprovider: {}\\n\\tcargs: {}'.format( wdeployment['id'], wdeployment['url'], wdeployment['owner'], wdeployment['cprovider'], wdeployment['cargs'], )) if", "if rc != 0: return rc normalized_path = os.path.abspath(argv_parsed.add_terminate_prog) if", "== 'proxy' print('ERROR: --assign-image not supported for cprovider `proxy`') return", "wd in config['wdeployments']: findings.append(WorkspaceInstance.inspect_instance(wdeployment=wd)) else: findings = [] for m", "1 workspace deployment')) advertise_parser.add_argument('-d', '--daemon', action='store_true', default=False, help='detach from invoking", "a help message, enter\\n\\n hardshare help addon-vnc') return 1 except", "return rc cprovider = config['wdeployments'][index]['cprovider'] if cprovider not in ['docker',", "elif argv_parsed.rm_addon_vnc: rm_vnc(wdeployment_id, tok) else: print('Use `hardshare addon-vnc` with a", "id prefix {}' .format(res['id_prefix'])) elif res['err'] == 'wrong authorization token':", "= get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc if argv_parsed.purge_supposed_instance:", "argv_parsed.cprovider.lower() if selected_cprovider not in ['docker', 'podman', 'proxy']: print('ERROR: cprovider", "1 workspace deployment')) rules_parser.add_argument('-l', '--list', action='store_true', default=False, dest='list_rules', help='list all", "err_key_path)) if config['remote']: if 'err' in config['remote']: print('Error occurred while", "'hardshare_client.{}.log'.format(config['wdeployments'][index]['id']) 
loghandler = logging.FileHandler(filename=logfname, mode='a', delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s) (pid:", "workspace deployment') return config, None, 1 indices.append(index) index = indices", "in res: print('\\tdissolved: {}'.format(res['date_dissolved'])) elif argv_parsed.command == 'dissolve': if ac", "attach_camera_parser.add_argument('camera', default=0, type=int, help=('on Linux, 0 typically implies /dev/video0; '", "wdeployment['url'], wdeployment['owner'], wdeployment['cprovider'], wdeployment['cargs'], )) if wdeployment['cprovider'] in ['docker', 'podman']:", "`hardshare config -h`)') return 1 if argv_parsed.id_prefix is None: wdid", "('n', 'no', ''): return 1 try: res = ac.dissolve_registration(wdid) except:", "config -h`)') return 1 if argv_parsed.id_prefix is None: wdid =", "default=False, help=('stop all attached cameras associated with this ' 'user", "help message, enter\\n\\n hardshare help addon-cmdsh') return 1 except ValueError", "if 'ssh_key' not in config or config['ssh_key'] is None: print('WARNING:", "if config['remote']: if 'err' in config['remote']: print('Error occurred while contacting", "in ['docker', 'podman', 'proxy']: print('ERROR: cprovider must be one of", "'manage add-on vnc for your workspace deployments' addon_vnc_parser = subparsers.add_parser('addon-vnc',", "help='remove add-on cmdsh', dest='rm_addon_cmdsh') addon_vnc_commanddesc = 'manage add-on vnc for", "workspace deployment with id prefix {}' .format(res['id_prefix'])) elif res['err'] ==", "0 elif argv_parsed.command == 'register': if ac is None: print('cannot", "'image', 'exists', argv_parsed.cprovider_img]) if cp_images.returncode != 0: print('ERROR: given image", "print('ERROR: given image name is not recognized by cprovider') return", "= subprocess.run([cprovider, 'image', 'exists', argv_parsed.cprovider_img]) if cp_images.returncode != 0: print('ERROR:", "unique prefix)')) 
terminate_parser.add_argument('-f', '--force', action='store_true', default=False, help=('if there is an", "not in ['docker', 'podman', 'proxy']: print('unknown cprovider: {}'.format(cprovider)) return 1", "return 0 elif argv_parsed.command == 'register': if ac is None:", "action='store_true', default=False, help='remove add-on vnc', dest='rm_addon_vnc') addon_mistyproxy_commanddesc = 'manage add-on", "= HSAPIClient() except: ac = None if argv_parsed.command == 'status':", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "print('failed to detect local instance') return 1 else: if ac", "and exit.') help_parser = subparsers.add_parser('help', help='print this help message and", "'addon-mistyproxy': addon_mistyproxy_parser.print_help() elif argv_parsed.help_target_command == 'ad': advertise_parser.print_help() elif argv_parsed.help_target_command ==", "output_format == 'json': print(json.dumps(res)) else: # output_format == 'yaml' print(yaml.dump(res,", "argv_parsed.help_target_command == 'attach-camera': attach_camera_parser.print_help() elif argv_parsed.help_target_command == 'stop-cameras': stop_cameras_parser.print_help() elif", "from list of commands to execute; ' 'for example, '", "{}\\n\\tcargs: {}'.format( wdeployment['id'], wdeployment['url'], wdeployment['owner'], wdeployment['cprovider'], wdeployment['cargs'], )) if wdeployment['cprovider']", "this help message and exit') argparser.add_argument('-V', '--version', action='store_true', default=False, help='print", "None: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0:", "workspace deployment' ' (can be unique prefix)')) advertise_commanddesc = 'advertise", "deployments' addon_vnc_parser = subparsers.add_parser('addon-vnc', description=addon_vnc_commanddesc, help=addon_vnc_commanddesc) addon_vnc_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None,", "None, 1 if len(config['wdeployments']) == 0: print(('ERROR: no workspace deployment", "your workspace 
deployments' addon_mistyproxy_parser = subparsers.add_parser('addon-mistyproxy', description=addon_mistyproxy_commanddesc, help=addon_mistyproxy_commanddesc) addon_mistyproxy_parser.add_argument('id_prefix', metavar='ID',", "'stop-cameras': stop_cameras_parser.print_help() elif argv_parsed.help_target_command == 'addon-cmdsh': addon_cmdsh_parser.print_help() elif argv_parsed.help_target_command ==", "jj, wdeployment in enumerate(ref): ref[jj]['url'] = 'https://rerobots.net/workspace/{}'.format(wdeployment['id']) if output_format ==", "index, rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: print('The given", "for jj, wdeployment in enumerate(ref): ref[jj]['url'] = 'https://rerobots.net/workspace/{}'.format(wdeployment['id']) if output_format", "help='remove add-on vnc', dest='rm_addon_vnc') addon_mistyproxy_commanddesc = 'manage add-on mistyproxy for", "try: ac = HSAPIClient() except: ac = None if argv_parsed.command", "{}'.format(wd['desc'])) print('\\torigin (address) of registration: {}' .format(wd['origin'])) if wd['dissolved']: print('\\tdissolved:", "given prefix does not match precisely 1 workspace deployment') return", ".format(os.getpid()))) pkglogger.addHandler(loghandler) return ac.run_sync(config['wdeployments'][index]['id']) elif argv_parsed.command == 'stop-ad': config, index,", "only 1 workspace deployment')) addon_cmdsh_parser.add_argument('--add', action='store_true', default=False, help='add add-on cmdsh", "' non-recoverable state, then mark it remotely as' ' terminated", "is found, then create one') config_parser.add_argument('--add-terminate-prog', metavar='PATH', dest='add_terminate_prog', default=None, help='add", "= { 'local': config, 'remote': remote_config, } if 'local' in", "config['wdeployments'][index]['init_inside'] = [] modify_local(config) elif argv_parsed.cprovider is not None: selected_cprovider", "rules` with a switch. 
For example, `hardshare rules -l`') print('or", "one of the following: docker, podman, proxy') return 1 config,", "`hardshare config --create`)') return 1 try: print(ac.register_new(at_most_one=argv_parsed.register_at_most_one)) except HSError as", "output_format == 'json': print(json.dumps(config)) elif output_format == 'yaml': print(yaml.dump(config, default_flow_style=False))", "instantiations by anyone') register_commanddesc = 'register new workspace deployment' register_parser", "to use (advanced option)') config_parser.add_argument('--rm-raw-device', metavar='PATH', type=str, dest='remove_raw_device_path', default=None, help='remove", "reach server. Are you connected to the Internet?') return 1", "print(yaml.dump(findings, default_flow_style=False)) elif argv_parsed.command == 'attach-camera': config, indices, rc =", "by you (the owner)') rules_parser.add_argument('--drop-all', action='store_true', default=False, dest='drop_all_rules', help=('remove all", "if argv_parsed.id_prefix is None: wdid = None else: try: wdid", "= logging.handlers.WatchedFileHandler(filename='hardshare_client.log', mode='a', delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s) (pid: {});' '", "Misty robot', dest='targetaddr') addon_mistyproxy_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on mistyproxy', dest='rm_addon_mistyproxy')", "detect local instance') return 1 else: if ac is None:", "else: findings = [] for wd in config['wdeployments']: findings.append(WorkspaceInstance.inspect_instance(wdeployment=wd)) else:", "# output_format == 'yaml' print(yaml.dump(res, default_flow_style=False)) elif argv_parsed.drop_all_rules or argv_parsed.add_rule_permit_me:", "capabilities or permissions)' rules_parser = subparsers.add_parser('rules', description=rules_commanddesc, help=rules_commanddesc) rules_parser.add_argument('id_prefix', metavar='ID',", "height = None, None if 
argv_parsed.attach_camera_crop_config: crop = json.loads(argv_parsed.attach_camera_crop_config) else:", "non-recoverable state, then mark it remotely as' ' terminated and", "value shown in `hardshare config -l` here')) config_parser.add_argument('--add-key', metavar='FILE', dest='new_api_token',", "argv_parsed.command == 'stop-ad': config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc", "as daemon)', dest='become_daemon') stop_cameras_commanddesc = 'stop camera streams previously started", "index = find_wd(config, idp) if index is None: print('ERROR: given", "only works if it' ' has been previously registered under'", "if 'local' in config: ref = config['local']['wdeployments'] else: ref =", "None, 1 if isinstance(id_prefix, list): if len(id_prefix) == 0: if", "rc cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--add-init-inside not", "cprovider') return 1 elif cprovider == 'docker': cp_images = subprocess.run([cprovider,", "is not None: selected_cprovider = argv_parsed.cprovider.lower() if selected_cprovider not in", "target workspace deployment' ' (can be unique prefix)')) terminate_parser.add_argument('-f', '--force',", "'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 if not os.path.exists(argv_parsed.raw_device_path): print('ERROR:", "{}...'.format(err_key_path)) os.unlink(err_key_path) elif argv_parsed.new_api_token: try: add_key(argv_parsed.new_api_token) except: print('failed to add", "be unique prefix)')) dissolve_commanddesc = ('dissolve this workspace deployment, making", "in config['wdeployments']: findings.append(WorkspaceInstance.inspect_instance(wdeployment=wd)) else: findings = [] for m in", "without valid API client') return 1 try: ac.terminate(config['wdeployments'][index]['id']) except FileNotFoundError:", "err in config['local']['err_keys'].items(): print('\\t {}: {}'.format(err, err_key_path)) if config['remote']: if", "help addon-mistyproxy') return 1 except ValueError 
as err: print('ERROR: {}'.format(err))", "(can be unique prefix)')) advertise_commanddesc = 'advertise availability, accept new", "cprovider == 'docker': cp_images = subprocess.run([cprovider, 'image', 'inspect', argv_parsed.cprovider_img], stdout=subprocess.DEVNULL,", "-h`)') return 1 try: wdid = str(uuid.UUID(argv_parsed.wdid)) except: print('The given", "program to list of commands to execute') config_parser.add_argument('--rm-terminate-prog', metavar='PATH', dest='rm_terminate_prog',", "return 1 elif argv_parsed.command == 'ad': if ac is None:", "argv_parsed.help_target_command == 'register': register_parser.print_help() elif argv_parsed.help_target_command == 'check': check_parser.print_help() elif", "argv_parsed.add_addon_cmdsh: add_cmdsh(wdeployment_id, tok) elif argv_parsed.rm_addon_cmdsh: rm_cmdsh(wdeployment_id, tok) else: print('Use `hardshare", "import os import os.path import subprocess import sys import uuid", "logging import logging.handlers import os import os.path import subprocess import", "workspace deployment') return config, None, 1 else: if len(config['wdeployments']) >", "unique prefix)')) dissolve_commanddesc = ('dissolve this workspace deployment, making it'", "addon_mistyproxy_parser.add_argument('--ip', metavar='ADDRESS', default=None, help='IP address of the Misty robot', dest='targetaddr')", "addon-vnc` with a switch.') print('To get a help message, enter\\n\\n", "fp: tok = fp.read().strip() if argv_parsed.attach_camera_res: width, height = [int(x)", "'if there is only 1 workspace deployment')) addon_vnc_parser.add_argument('--add', action='store_true', default=False,", "config_parser.add_argument('-l', '--list', action='store_true', default=False, dest='list_config', help='list configuration') config_parser.add_argument('--local', action='store_true', default=False,", "if rc != 0: return rc wdeployments = [config['wdeployments'][jj]['id'] for", "get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc carg = 
'--device={D}:{D}'.format(D=argv_parsed.remove_raw_device_path)", "None: print('ERROR: given prefix does not match precisely 1 workspace", "configuration data') config_parser.add_argument('--include-dissolved', action='store_true', default=False, dest='include_dissolved', help='include configuration data of", "to Misty robots', dest='add_addon_mistyproxy') addon_mistyproxy_parser.add_argument('--ip', metavar='ADDRESS', default=None, help='IP address of", "add_cmdsh(wdeployment_id, tok) elif argv_parsed.rm_addon_cmdsh: rm_cmdsh(wdeployment_id, tok) else: print('Use `hardshare addon-cmdsh`", "use this file except in compliance with the License. #", "workspace deployment')) config_parser.add_argument('-c', '--create', action='store_true', default=False, dest='create_config', help='if no local", "deployment' check_parser = subparsers.add_parser('check', description=check_commanddesc, help=check_commanddesc) check_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None,", "default=None, help=('remove program from list of commands to execute; '", "hardshare client'), add_help=False) argparser.add_argument('-h', '--help', dest='print_help', action='store_true', default=False, help='print this", "metavar='IMG', type=str, dest='cprovider_img', default=None, help='assign image for cprovider to use", "workspace deployments') config_parser.add_argument('--declare', metavar='ID', dest='declared_wdeployment_id', default=None, help=('declare that workspace deployment", "there is only 1 workspace deployment')) advertise_parser.add_argument('-d', '--daemon', action='store_true', default=False,", "as err: print('ERROR: {}'.format(err)) return 1 elif argv_parsed.command == 'ad':", "is required with --add') return 1 add_mistyproxy(wdeployment_id, tok, argv_parsed.targetaddr) elif", "attach' ' (can be unique prefix); ' 'this argument is", "help='print version of hardshare (this) package.', dest='print_version') argparser.add_argument('-v', '--verbose', 
action='store_true',", "with connection type sshtun cannot launch.') pkglogger.removeHandler(loghandler) if argv_parsed.become_daemon: if", "not None: output_format = argv_parsed.output_format.lower() if output_format not in ['yaml',", "For example, `hardshare rules -l`') print('or to get a help", "interface' ' for the hardshare client'), add_help=False) argparser.add_argument('-h', '--help', dest='print_help',", "not started on this host'), dest='all_cameras') addon_cmdsh_commanddesc = 'manage add-on", "the hardshare client'), add_help=False) argparser.add_argument('-h', '--help', dest='print_help', action='store_true', default=False, help='print", "{}? This action cannot be undone. ' '[y/N] ').format(wdid), end='')", "{docker, podman} if config['wdeployments'][index]['image'] is None: config['wdeployments'][index]['image'] = 'rerobots/hs-generic' modify_local(config)", "subparsers.add_parser('ad', description=advertise_commanddesc, help=advertise_commanddesc) advertise_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace", "deployment' ' (can be unique prefix); ' 'this argument is", "CANNOT BE UNDONE)') dissolve_parser = subparsers.add_parser('dissolve', description=dissolve_commanddesc, help=dissolve_commanddesc) dissolve_parser.add_argument('wdid', metavar='ID',", "!= 0: return rc if argv_parsed.purge_supposed_instance: cprovider = config['wdeployments'][index]['cprovider'] if", "# Remove from local configuration, if present rm_wd(get_local_config(), wdid, save=True)", "print('Try config -l --local to only get local information') return", "Did it expire?') else: print(config['remote']['err']) return 1 if len(config['remote']['deployments']) ==", "found. 
(try `hardshare config -h`)') return 1 try: wdid =", "to SSH key pair (does NOT copy the key)') config_parser.add_argument('--add-raw-device',", "type=str, dest='add_init_inside', default=None, help='add command to be executed inside container')", "rm_cmdsh, add_vnc, rm_vnc, add_mistyproxy, rm_mistyproxy def get_config_with_index(id_prefix=None): try: config =", "argv_parsed.help_target_command == 'rules': rules_parser.print_help() elif argv_parsed.help_target_command == 'register': register_parser.print_help() elif", "if 'container' in findings: try: subprocess.check_call([cprovider, 'rm', '-f', findings['container']['name']], stdout=subprocess.DEVNULL,", "'local' not in config: config = { 'local': config, 'remote':", "except: print('ERROR: {} or {} does not exist or '", "'including to you (the owner)')) rules_parser.add_argument('--permit-all', action='store_true', default=False, dest='add_rule_permit_all', help='permit", "1 index = [0] else: indices = [] for idp", "workspace deployment')) attach_camera_parser.add_argument('--width-height', metavar='W,H', type=str, dest='attach_camera_res', default=None, help=('width and height", "YAML , JSON'), dest='output_format') subparsers = argparser.add_subparsers(dest='command') subparsers.add_parser('version', help='print version", "= get_local_config(create_if_empty=argv_parsed.create_config, collect_errors=True) except: print('error loading configuration data.' 
' does", "camera, then try 0')) attach_camera_parser.add_argument('id_prefix', metavar='ID', nargs='*', default=None, help=('id of", "logging.handlers import os import os.path import subprocess import sys import", "None, } print('workspace deployments defined in local configuration:') if len(config['local']['wdeployments'])", "config_parser.add_argument('--add-raw-device', metavar='PATH', type=str, dest='raw_device_path', default=None, help='add device file to present", "rc wdeployments = [config['wdeployments'][jj]['id'] for jj in indices] local_keys =", "try: config = get_local_config() except: print('error loading configuration data. does", "cprovider to use (advanced option)') config_parser.add_argument('--rm-raw-device', metavar='PATH', type=str, dest='remove_raw_device_path', default=None,", "elif output_format == 'yaml': print(yaml.dump(config, default_flow_style=False)) else: if 'local' not", "dest='rm_init_inside', help='remove (empty) list of commands for inside initialization') config_parser.add_argument('-p',", "of captured images; ' 'default depends on the supporting drivers'))", "in config['local']['err_keys'].items(): print('\\t {}: {}'.format(err, err_key_path)) if config['remote']: if 'err'", "config['wdeployments'][index]['terminate'].remove(argv_parsed.rm_terminate_prog) modify_local(config) else: print('Use `hardshare config` with a switch. For", "stop_cameras_parser.add_argument('-a', '--all', action='store_true', default=False, help=('stop all attached cameras associated with", "2018 rerobots, Inc. # # Licensed under the Apache License,", "`proxy`') return 1 elif cprovider not in ['docker', 'podman']: print('unknown", "print('--add-raw-device not supported for cprovider `proxy`') return 1 elif cprovider", "{ 'local': config, 'remote': None, } print('workspace deployments defined in", "1: print('No valid keys available. 
Check: `hardshare config -l`') return", "subparsers.add_parser('stop-ad', description=terminate_commanddesc, help=terminate_commanddesc) terminate_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of target", "elif argv_parsed.rm_addon_mistyproxy: rm_mistyproxy(wdeployment_id, tok) else: print('Use `hardshare addon-mistyproxy` with a", "config_parser.add_argument('--cprovider', metavar='CPROVIDER', type=str, dest='cprovider', default=None, help='select a container provider: docker,", "to_user='*') except Exception as err: print('{}'.format(err)) return 1 else: print('Use", "remote config, given possibly new local config try: assert ac", "= subprocess.run([cprovider, 'image', 'inspect', argv_parsed.cprovider_img], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) if cp_images.returncode !=", "indices.append(index) index = indices elif id_prefix: index = find_wd(config, id_prefix)", "default=None, help=('id of workspace deployment' ' (can be unique prefix);", "return 0 os.close(0) os.close(1) os.close(2) try: camera_main(wdeployments, tok=tok, dev=argv_parsed.camera, width=width,", "config, None, 1 index = [0] else: indices = []", "in compliance with the License. # You may obtain a", "under the License. 
\"\"\"Command-line interface \"\"\" import argparse import json", "elif argv_parsed.help_target_command == 'stop-ad': terminate_parser.print_help() else: argparser.print_help() else: argparser.print_help() return", "= [] for m in find_wd(config, argv_parsed.id_prefix, one_or_none=False): findings.append(WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][m])) if", "of registration: {}'.format(res['origin'])) if 'date_dissolved' in res: print('\\tdissolved: {}'.format(res['date_dissolved'])) elif", "prefix does not match precisely 1 workspace deployment') return config,", "software # distributed under the License is distributed on an", "wdeployment declared')) check_commanddesc = 'check registration of this workspace deployment'", "/dev/video0; ' 'if you only have one camera, then try", "' terminate` without --purge.') terminate_parser.add_argument('--purge', action='store_true', default=False, help=help_message_purge, dest='purge_supposed_instance') argv_parsed", "else: if ac is None: print('cannot terminate without valid API", "description=dissolve_commanddesc, help=dissolve_commanddesc) dissolve_parser.add_argument('wdid', metavar='ID', nargs='?', default=None, help='id of workspace deployment", "!= 0: return rc config['wdeployments'][index]['terminate'].remove(argv_parsed.rm_terminate_prog) modify_local(config) else: print('Use `hardshare config`", "elif argv_parsed.command == 'dissolve': if ac is None: print('no local", "' but there is not one or it is otherwise", "and daemon' status_parser = subparsers.add_parser('status', description=status_commanddesc, help=status_commanddesc) status_parser.add_argument('id_prefix', metavar='ID', nargs='?',", "wdeployments get full images')) attach_camera_parser.add_argument('-d', '--daemon', action='store_true', default=False, help='detach from", "full images')) attach_camera_parser.add_argument('-d', '--daemon', action='store_true', default=False, help='detach from invoking terminal", "executed inside 
container') config_parser.add_argument('--rm-init-inside', action='store_true', default=False, dest='rm_init_inside', help='remove (empty) list", "print('--ip is required with --add') return 1 add_mistyproxy(wdeployment_id, tok, argv_parsed.targetaddr)", "get_config_with_index(id_prefix=None): try: config = get_local_config() except: print('error loading configuration data.", "be unique prefix)')) terminate_parser.add_argument('-f', '--force', action='store_true', default=False, help=('if there is", "config -h`)') return 1 try: wdid = str(uuid.UUID(argv_parsed.wdid)) except: print('The", "dest='add_terminate_prog', default=None, help='add program to list of commands to execute')", "os.close(1) os.close(2) else: pkglogger.addHandler(logging.StreamHandler()) logfname = 'hardshare_client.{}.log'.format(config['wdeployments'][index]['id']) loghandler = logging.FileHandler(filename=logfname,", "if it' ' has been previously registered under' ' the", "if rc != 0: return rc cprovider = config['wdeployments'][index]['cprovider'] if", "as err: print('ERROR: {}'.format(err)) return 1 elif argv_parsed.command == 'addon-mistyproxy':", "None while ui_input not in ('y', 'yes'): print(('Do you want", "terminate` without --purge.') terminate_parser.add_argument('--purge', action='store_true', default=False, help=help_message_purge, dest='purge_supposed_instance') argv_parsed =", "< 1: print('No valid keys available. 
Check: `hardshare config -l`')", "index = 0 return config, index, 0 def main(argv=None): pkglogger", "try: print(ac.register_new(at_most_one=argv_parsed.register_at_most_one)) except HSError as err: print('ERROR: {}'.format(err)) return 1", "as fp: tok = fp.read().strip() try: if argv_parsed.add_addon_mistyproxy: if argv_parsed.targetaddr", "connected to the Internet?') return 1 elif argv_parsed.command == 'rules':", "config try: assert ac is not None remote_config = ac.get_remote_config(include_dissolved=argv_parsed.include_dissolved)", "' 'if there is only 1 workspace deployment')) config_parser.add_argument('-c', '--create',", "'container' in findings: try: subprocess.check_call([cprovider, 'rm', '-f', findings['container']['name']], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)", "= 'advertise availability, accept new instances' advertise_parser = subparsers.add_parser('ad', description=advertise_commanddesc,", "= find_wd(config, idp) if index is None: print('ERROR: given prefix", "addon_vnc_commanddesc = 'manage add-on vnc for your workspace deployments' addon_vnc_parser", "enable terminal access via WebSockets', dest='add_addon_cmdsh') addon_cmdsh_parser.add_argument('--rm', action='store_true', default=False, help='remove", "0: print('The given ID does not appear to be valid.')", "get local information') return 1 config = { 'local': config,", "for inside initialization') config_parser.add_argument('-p', '--prune', action='store_true', default=False, dest='prune_err_keys', help=('delete files", "{}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside'] = [] modify_local(config) elif argv_parsed.cprovider is", "add_ssh_path, list_local_keys from .mgmt import find_wd, modify_local, rm_wd from .api", "to advertise' ' (can be unique prefix); ' 'this argument", "crop configuration; ' 'default: all wdeployments get full images')) attach_camera_parser.add_argument('-d',", "rc != 0: return rc config['wdeployments'][index]['cprovider'] = 
selected_cprovider if selected_cprovider", "rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc if", "for terminate_p in wdeployment['terminate']: print('\\t\\t{}'.format(terminate_p)) print('\\nfound keys:') if len(config['local']['keys']) ==", "get_local_config() except: print('error loading configuration data. does it exist?') return", "# # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "+ '\\n\\t'.join(config['local']['keys'])) if 'err_keys' in config['local'] and len(config['local']['err_keys']) > 0:", "list of commands to execute; ' 'for example, ' 'copy-and-paste", "%(asctime)s ; %(message)s' .format(os.getpid()))) pkglogger.addHandler(loghandler) return ac.run_sync(config['wdeployments'][index]['id']) elif argv_parsed.command ==", "0: return rc if 'ssh_key' not in config or config['ssh_key']", "return config, None, 1 index = [0] else: indices =", "{}'.format(err)) return 1 except ConnectionError: print('ERROR: failed to reach server.", "list_local_keys(collect_errors=True) for err_key_path, err in errored_keys.items(): print('deleting {}...'.format(err_key_path)) os.unlink(err_key_path) elif", "'register new workspace deployment' register_parser = subparsers.add_parser('register', description=register_commanddesc, help=register_commanddesc) register_parser.add_argument('--permit-more',", "0: print('\\t(none)') else: for wdeployment in config['local']['wdeployments']: print('{}\\n\\turl: {}\\n\\towner: {}\\n\\tcprovider:", "workspace deployment')) addon_vnc_parser.add_argument('--add', action='store_true', default=False, help='add add-on vnc to enable", "< 1 or height < 1: print('Width, height must be", "to execute') config_parser.add_argument('--rm-terminate-prog', metavar='PATH', dest='rm_terminate_prog', default=None, help=('remove program from list", "except ValueError as err: print('ERROR: {}'.format(err)) return 1 elif argv_parsed.command", "Internet?') return 1 elif argv_parsed.command == 
'addon-cmdsh': if ac is", "get_local_config(create_if_empty=argv_parsed.create_config, collect_errors=True) except: print('error loading configuration data.' ' does it", "with a switch. For example, `hardshare rules -l`') print('or to", "add_cmdsh, rm_cmdsh, add_vnc, rm_vnc, add_mistyproxy, rm_mistyproxy def get_config_with_index(id_prefix=None): try: config", "= argparse.ArgumentParser(description=('Command-line interface' ' for the hardshare client'), add_help=False) argparser.add_argument('-h',", "return 1 try: ac.terminate(config['wdeployments'][index]['id']) except FileNotFoundError: print('ERROR: cannot reach daemon.", "type=int, help=('on Linux, 0 typically implies /dev/video0; ' 'if you", "config['wdeployments'][index]['cprovider'] = selected_cprovider if selected_cprovider == 'proxy': config['wdeployments'][index]['image'] = None", "configuration.')) return config, None, 1 if isinstance(id_prefix, list): if len(id_prefix)", "loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s) (pid: {});' ' %(asctime)s ; %(message)s' .format(os.getpid()))) pkglogger.addHandler(loghandler)", "with the License. # You may obtain a copy of", "there is only 1 workspace deployment')) config_parser.add_argument('-c', '--create', action='store_true', default=False,", "in config['remote']['deployments']: print('{}'.format(wd['id'])) print('\\tcreated: {}'.format(wd['date_created'])) if wd['desc'] is not None:", "= ('dissolve this workspace deployment, making it' ' unavailable for", "not argv_parsed.become_daemon: print('ERROR: failed to reach server. 
Are you connected", "declared')) check_commanddesc = 'check registration of this workspace deployment' check_parser", "of workspace deployment' ' (can be unique prefix); ' 'this", "deployment for configuration changes' ' (can be unique prefix); '", "None: wdid = None else: try: wdid = str(uuid.UUID(argv_parsed.id_prefix)) except:", "inside initialization') config_parser.add_argument('-p', '--prune', action='store_true', default=False, dest='prune_err_keys', help=('delete files in", "print('ERROR: cannot reach daemon. Does it exist? (Try `hardshare status`)')", "API token. Did it expire?') else: print(config['remote']['err']) return 1 if", "use (advanced option)') config_parser.add_argument('--rm-raw-device', metavar='PATH', type=str, dest='remove_raw_device_path', default=None, help='remove device", "address of the Misty robot', dest='targetaddr') addon_mistyproxy_parser.add_argument('--rm', action='store_true', default=False, help='remove", "does not exist') return 1 config['wdeployments'][index]['terminate'].append(normalized_path) modify_local(config) elif argv_parsed.rm_terminate_prog is", "`hardshare config -l`') print('or to get a help message, enter\\n\\n", "hardshare help addon-cmdsh') return 1 except ValueError as err: print('ERROR:", "in local key directory that' ' are not valid; to", "advertise' ' (can be unique prefix); ' 'this argument is", "print('No valid keys available. Check: `hardshare config -l`') return 1", "exist? 
(Try `hardshare status`)') return 1 return 0 elif argv_parsed.command", "status_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of target workspace deployment' '", "print('\\t(none)') else: for wdeployment in config['local']['wdeployments']: print('{}\\n\\turl: {}\\n\\towner: {}\\n\\tcprovider: {}\\n\\tcargs:", "'proxy': config['wdeployments'][index]['image'] = None else: # selected_cprovider \\in {docker, podman}", "1 if argv_parsed.id_prefix is None: if len(config['wdeployments']) == 0: findings", "deployments with this user account') else: print('\\nregistered workspace deployments with", "{});' ' %(asctime)s ; %(message)s' .format(os.getpid()))) pkglogger.addHandler(loghandler) return ac.run_sync(config['wdeployments'][index]['id']) elif", "elif argv_parsed.add_rule_permit_me: ac.add_access_rule(wdid) except Exception as err: print('{}'.format(err)) return 1", "indices] local_keys = list_local_keys() if len(local_keys) < 1: print('No valid", "print('\\t\\t{}'.format(terminate_p)) print('\\nfound keys:') if len(config['local']['keys']) == 0: print('\\t(none)') else: print('\\t'", "express or implied. # See the License for the specific", "except in compliance with the License. 
# You may obtain", "local config try: assert ac is not None remote_config =", "{}'.format(err)) return 1 elif argv_parsed.command == 'addon-vnc': if ac is", "enter\\n\\n hardshare help addon-vnc') return 1 except ValueError as err:", "connected to the Internet?') return 1 elif argv_parsed.command == 'stop-cameras':", "== 'stop-ad': terminate_parser.print_help() else: argparser.print_help() else: argparser.print_help() return 0 if", "argparser.add_argument('-v', '--verbose', action='store_true', default=False, help='print verbose messages about actions by", "help='detach from invoking terminal (i.e., run as daemon)', dest='become_daemon') stop_cameras_commanddesc", "get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc wdeployments = [config['wdeployments'][jj]['id']", "it' ' unavailable for any future use' ' (THIS CANNOT", "expire?') else: print(res['err']) return 1 # Remove from local configuration,", "help config') return 1 return 0 if __name__ == '__main__':", "cprovider == 'proxy': print('--rm-init-inside not supported for cprovider `proxy`') return", "(this only works if it' ' has been previously registered", "unrecognized: {}'.format(argv_parsed.output_format)) return 1 else: output_format = None try: ac", "(address) of registration: {}'.format(res['origin'])) if 'date_dissolved' in res: print('\\tdissolved: {}'.format(res['date_dissolved']))", "help message and exit') help_parser.add_argument('help_target_command', metavar='COMMAND', type=str, nargs='?') config_commanddesc =", "ac.get_access_rules(wdid) except Exception as err: print('{}'.format(err)) return 1 if 'err'", "--local to only get local information') return 1 config =", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "return 1 config = { 'local': config, 'remote': remote_config, }", "'addon-cmdsh': addon_cmdsh_parser.print_help() elif argv_parsed.help_target_command == 'addon-vnc': addon_vnc_parser.print_help() elif argv_parsed.help_target_command ==", "server ' 
'at {}'.format(ac.base_uri)) return 1 if 'err' in res:", "API client') return 1 try: ac.terminate(config['wdeployments'][index]['id']) except FileNotFoundError: print('ERROR: cannot", "is only 1 workspace deployment')) addon_cmdsh_parser.add_argument('--add', action='store_true', default=False, help='add add-on", "len(config['wdeployments']) > 1: print('ERROR: ambiguous command: more than 1 workspace", "argv_parsed.list_rules: try: res = ac.get_access_rules(wdid) except Exception as err: print('{}'.format(err))", "width=width, height=height, crop=crop) except ConnectionError: if not argv_parsed.become_daemon: print('ERROR: failed", "elif argv_parsed.drop_all_rules or argv_parsed.add_rule_permit_me: try: if argv_parsed.drop_all_rules: ac.drop_access_rules(wdid) elif argv_parsed.add_rule_permit_me:", "'addon-vnc': if ac is None: print('cannot register without initial local", "deployment on which to attach' ' (can be unique prefix);", "it expire?') else: print(res['err']) return 1 res['comments'] = [ 'Access", "required ' 'if there is only 1 workspace deployment')) config_parser.add_argument('-c',", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "message, enter\\n\\n hardshare help addon-vnc') return 1 except ValueError as", "if ui_input in ('n', 'no', ''): return 1 try: res", "for cprovider `proxy`') return 1 config['wdeployments'][index]['image'] = argv_parsed.cprovider_img modify_local(config) elif", "'addon-mistyproxy': if ac is None: print('cannot register without initial local", "1 if len(config['remote']['deployments']) == 0: print('\\nno registered workspace deployments with", "dissolve') status_commanddesc = 'get status of local instances and daemon'", "wd['dissolved']: print('\\tdissolved: {}'.format(wd['dissolved'])) elif argv_parsed.prune_err_keys: _, errored_keys = list_local_keys(collect_errors=True) for", "image name is not recognized by cprovider') return 1 else:", "nargs='?', default=None, help=('id of target workspace deployment' ' (can be", "dest='add_rule_permit_all', help='permit instantiations by anyone') register_commanddesc = 'register new workspace", "except: print('error loading configuration data. does it exist?') return 1", "== 0: print('\\nno registered workspace deployments with this user account')", "BE UNDONE)') dissolve_parser = subparsers.add_parser('dissolve', description=dissolve_commanddesc, help=dissolve_commanddesc) dissolve_parser.add_argument('wdid', metavar='ID', nargs='?',", "argv_parsed.help_target_command == 'addon-vnc': addon_vnc_parser.print_help() elif argv_parsed.help_target_command == 'addon-mistyproxy': addon_mistyproxy_parser.print_help() elif", "register_parser.print_help() elif argv_parsed.help_target_command == 'check': check_parser.print_help() elif argv_parsed.help_target_command == 'dissolve':", "res: if res['err'] == 'not found': print('not found: workspace deployment", "API token. Did it expire?') else: print(res['err']) return 1 else:", "try: ac.terminate(config['wdeployments'][index]['id']) except FileNotFoundError: print('ERROR: cannot reach daemon. 
Does it", "same user account.)')) rules_commanddesc = 'modify access rules (also known", "(try `hardshare config --create`)') return 1 try: print(ac.register_new(at_most_one=argv_parsed.register_at_most_one)) except HSError", "print('ERROR: given device file does not exist') return 1 carg", "vnc to enable VNC via rerobots.net', dest='add_addon_vnc') addon_vnc_parser.add_argument('--rm', action='store_true', default=False,", "' 'if there is only 1 workspace deployment')) advertise_parser.add_argument('-d', '--daemon',", "terminal access via WebSockets', dest='add_addon_cmdsh') addon_cmdsh_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on", "elif argv_parsed.rm_init_inside: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc !=", "prefix {}' .format(res['id_prefix'])) elif res['err'] == 'wrong authorization token': print('wrong", ".mgmt import get_local_config, add_key, add_ssh_path, list_local_keys from .mgmt import find_wd,", "occurred while contacting rerobots servers') print('Try config -l --local to", "selected_cprovider not in ['docker', 'podman', 'proxy']: print('ERROR: cprovider must be", ".mgmt import find_wd, modify_local, rm_wd from .api import HSAPIClient from", "else: argparser.print_help() else: argparser.print_help() return 0 if argv_parsed.verbose: pkglogger.setLevel(logging.DEBUG) if", "= get_config_with_index(argv_parsed.id_prefix) if rc != 0: print('The given ID does", "argv_parsed.new_ssh_path + '.pub' )) return 1 elif argv_parsed.create_config: get_local_config(create_if_empty=True) elif", "local configuration does not declare SSH key.\\n' 'Instances with connection", "to be valid.') return 1 wdid = config['wdeployments'][index]['id'] if argv_parsed.list_rules:", "-l`') print('or to get a help message, enter\\n\\n hardshare help", "1 else: # cprovider == 'proxy' print('ERROR: --assign-image not supported", "if len(local_keys) < 1: print('No valid keys available. 
Check: `hardshare", "height must be positive') return 1 else: width, height =", "for x in argv_parsed.attach_camera_res.split(',')] if width < 1 or height", "print('unknown cprovider: {}'.format(cprovider)) return 1 if cprovider == 'podman': cp_images", "= argv_parsed.cprovider_img modify_local(config) elif argv_parsed.add_terminate_prog is not None: config, index,", "if argv_parsed.drop_all_rules: ac.drop_access_rules(wdid) elif argv_parsed.add_rule_permit_me: ac.add_access_rule(wdid) except Exception as err:", "x in argv_parsed.attach_camera_res.split(',')] if width < 1 or height <", "container `{}`'.format(findings['container']['name'])) return 1 return 0 else: print('failed to detect", "argv_parsed.help_target_command == 'addon-cmdsh': addon_cmdsh_parser.print_help() elif argv_parsed.help_target_command == 'addon-vnc': addon_vnc_parser.print_help() elif", "keys available. Check: `hardshare config -l`') return 1 with open(local_keys[0],", "is None or argv_parsed.command == 'help': if hasattr(argv_parsed, 'help_target_command') and", "is not required ' 'if there is only 1 workspace", "import json import logging import logging.handlers import os import os.path", "configuration data. does it exist?') return None, None, 1 if", "get full images')) attach_camera_parser.add_argument('-d', '--daemon', action='store_true', default=False, help='detach from invoking", "__version__ as hardshare_pkg_version print(hardshare_pkg_version) return 0 elif argv_parsed.command is None", "help=('declare that workspace deployment is' ' hosted here. (this only", "of dissolved workspace deployments') config_parser.add_argument('--declare', metavar='ID', dest='declared_wdeployment_id', default=None, help=('declare that", "message and exit') help_parser.add_argument('help_target_command', metavar='COMMAND', type=str, nargs='?') config_commanddesc = 'manage", "= 'manage add-on vnc for your workspace deployments' addon_vnc_parser =", "loading configuration data. 
does it exist?') return 1 if argv_parsed.id_prefix", "else: output_format = None try: ac = HSAPIClient() except: ac", "'remote': None, } print('workspace deployments defined in local configuration:') if", "no special formatting); ' 'options: YAML , JSON'), dest='output_format') subparsers", "add-on mistyproxy to allow HTTP proxy to Misty robots', dest='add_addon_mistyproxy')", "and exit') help_parser.add_argument('help_target_command', metavar='COMMAND', type=str, nargs='?') config_commanddesc = 'manage local", "help=('id of target workspace deployment' ' (can be unique prefix)'))", "1 # Remove from local configuration, if present rm_wd(get_local_config(), wdid,", "default=False, help='remove add-on mistyproxy', dest='rm_addon_mistyproxy') terminate_commanddesc = 'mark as unavailable;", "mode='a', delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s) (pid: {});' ' %(asctime)s ;", "{}'.format(cprovider)) return 1 if cprovider == 'podman': cp_images = subprocess.run([cprovider,", "permissions and # limitations under the License. \"\"\"Command-line interface \"\"\"", "return 1 if not argv_parsed.only_local_config: # Try to get remote", "default=False, help='remove add-on vnc', dest='rm_addon_vnc') addon_mistyproxy_commanddesc = 'manage add-on mistyproxy", "if os.fork() != 0: return 0 os.close(0) os.close(1) os.close(2) else:", "message, enter\\n\\n hardshare help addon-mistyproxy') return 1 except ValueError as", "cprovider: {}'.format(cprovider)) return 1 config['wdeployments'][index]['init_inside'].append(argv_parsed.add_init_inside) modify_local(config) elif argv_parsed.rm_init_inside: config, index,", "config_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment for configuration", "resort. 
First, try `hardshare' ' terminate` without --purge.') terminate_parser.add_argument('--purge', action='store_true',", "if argv_parsed.add_addon_mistyproxy: if argv_parsed.targetaddr is None: print('--ip is required with", "is not None ac.declare_existing(argv_parsed.declared_wdeployment_id) ac.sync_config() elif argv_parsed.raw_device_path is not None:", "local and remote configuration' config_parser = subparsers.add_parser('config', description=config_commanddesc, help=config_commanddesc) config_parser.add_argument('id_prefix',", "metavar='ID', nargs='?', default=None, help=('id of target workspace deployment' ' (can", "0 os.close(0) os.close(1) os.close(2) else: pkglogger.addHandler(logging.StreamHandler()) logfname = 'hardshare_client.{}.log'.format(config['wdeployments'][index]['id']) loghandler", "find_wd(config, id_prefix) if index is None: print('ERROR: given prefix does", "print('ERROR: given prefix does not match precisely 1 workspace deployment')", "deployment with id prefix {}' .format(res['id_prefix'])) elif res['err'] == 'wrong", "' 'default: all wdeployments get full images')) attach_camera_parser.add_argument('-d', '--daemon', action='store_true',", "check_commanddesc = 'check registration of this workspace deployment' check_parser =", "ac.drop_access_rules(wdid) elif argv_parsed.add_rule_permit_me: ac.add_access_rule(wdid) except Exception as err: print('{}'.format(err)) return", "help=('image crop configuration; ' 'default: all wdeployments get full images'))", "print('\\torigin (address) of registration: {}'.format(res['origin'])) if 'date_dissolved' in res: print('\\tdissolved:", "enter\\n\\n hardshare help config') return 1 return 0 if __name__", "defined in local configuration:') if len(config['local']['wdeployments']) == 0: print('\\t(none)') else:", "print('found possible keys with errors:') for err_key_path, err in config['local']['err_keys'].items():", "precisely 1 workspace deployment') return config, None, 1 else: if", "print('failed to 
stop container `{}`'.format(findings['container']['name'])) return 1 return 0 else:", "default=None, help='select a container provider: docker, podman, proxy') config_parser.add_argument('--assign-image', metavar='IMG',", "> 0: print('found possible keys with errors:') for err_key_path, err", ")) return 1 elif argv_parsed.create_config: get_local_config(create_if_empty=True) elif argv_parsed.declared_wdeployment_id is not", "rc != 0: return rc carg = '--device={D}:{D}'.format(D=argv_parsed.remove_raw_device_path) config['wdeployments'][index]['cargs'].remove(carg) modify_local(config)", "name is not recognized by cprovider') return 1 else: #", "any future use' ' (THIS CANNOT BE UNDONE)') dissolve_parser =", "data. does it exist?') return None, None, 1 if len(config['wdeployments'])", "workspace deployment to check' ' (can be unique prefix)')) dissolve_commanddesc", "help message, enter\\n\\n hardshare help addon-vnc') return 1 except ValueError", "host'), dest='all_cameras') addon_cmdsh_commanddesc = 'manage add-on cmdsh for your workspace", "addon-mistyproxy` with a switch.') print('To get a help message, enter\\n\\n", "help=register_commanddesc) register_parser.add_argument('--permit-more', action='store_false', default=True, dest='register_at_most_one', help=('permit registration of more than", "package.', dest='print_version') argparser.add_argument('-v', '--verbose', action='store_true', default=False, help='print verbose messages about", "optionally wait for current instance to finish' terminate_parser = subparsers.add_parser('stop-ad',", "0: return rc config['wdeployments'][index]['terminate'].remove(argv_parsed.rm_terminate_prog) modify_local(config) else: print('Use `hardshare config` with", "client', dest='verbose') argparser.add_argument('--format', metavar='FORMAT', default=None, type=str, help=('special output formatting (default", "advertise_parser = subparsers.add_parser('ad', description=advertise_commanddesc, help=advertise_commanddesc) 
advertise_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id", "(i.e., run as daemon)', dest='become_daemon') stop_cameras_commanddesc = 'stop camera streams", "dest='become_daemon') attach_camera_commanddesc = 'attach camera stream to workspace deployments' attach_camera_parser", "return 1 add_mistyproxy(wdeployment_id, tok, argv_parsed.targetaddr) elif argv_parsed.rm_addon_mistyproxy: rm_mistyproxy(wdeployment_id, tok) else:", "does not declare SSH key.\\n' 'Instances with connection type sshtun", "message, enter\\n\\n hardshare help rules') return 1 elif argv_parsed.command ==", "= config['wdeployments'][index]['id'] local_keys = list_local_keys() if len(local_keys) < 1: print('No", "rc != 0: print('The given ID does not appear to", "print(res['err']) return 1 res['comments'] = [ 'Access is denied unless", "print('\\nregistered workspace deployments with this user account:') for wd in", "if cprovider not in ['docker', 'podman', 'proxy']: print('unknown cprovider: {}'.format(cprovider))", "1 config['wdeployments'][index]['image'] = argv_parsed.cprovider_img modify_local(config) elif argv_parsed.add_terminate_prog is not None:", "elif argv_parsed.command == 'rules': if ac is None: print('no local", "print('\\tcreated: {}'.format(wd['date_created'])) if wd['desc'] is not None: print('\\tdesc: {}'.format(wd['desc'])) print('\\torigin", "for your workspace deployments' addon_mistyproxy_parser = subparsers.add_parser('addon-mistyproxy', description=addon_mistyproxy_commanddesc, help=addon_mistyproxy_commanddesc) addon_mistyproxy_parser.add_argument('id_prefix',", "1 elif cprovider == 'docker': cp_images = subprocess.run([cprovider, 'image', 'inspect',", "return 1 if len(config['remote']['deployments']) == 0: print('\\nno registered workspace deployments", "config['remote']['deployments']: print('{}'.format(wd['id'])) print('\\tcreated: {}'.format(wd['date_created'])) if wd['desc'] is not None: print('\\tdesc:", "message and exit') 
argparser.add_argument('-V', '--version', action='store_true', default=False, help='print version of", "'if there is only 1 workspace deployment')) advertise_parser.add_argument('-d', '--daemon', action='store_true',", "print('Use `hardshare addon-vnc` with a switch.') print('To get a help", "Exception as err: print('{}'.format(err)) return 1 else: print('Use `hardshare rules`", "deployment')) addon_cmdsh_parser.add_argument('--add', action='store_true', default=False, help='add add-on cmdsh to enable terminal", "fp.read().strip() if argv_parsed.attach_camera_res: width, height = [int(x) for x in", "if argv_parsed.attach_camera_res: width, height = [int(x) for x in argv_parsed.attach_camera_res.split(',')]", "action='store_true', default=False, help='remove add-on cmdsh', dest='rm_addon_cmdsh') addon_vnc_commanddesc = 'manage add-on", "help='remove (empty) list of commands for inside initialization') config_parser.add_argument('-p', '--prune',", "'podman', 'proxy']: print('ERROR: cprovider must be one of the following:", "help=('on Linux, 0 typically implies /dev/video0; ' 'if you only", "try: if argv_parsed.add_addon_vnc: add_vnc(wdeployment_id, tok) elif argv_parsed.rm_addon_vnc: rm_vnc(wdeployment_id, tok) else:", "metavar='W,H', type=str, dest='attach_camera_res', default=None, help=('width and height of captured images;", "have one camera, then try 0')) attach_camera_parser.add_argument('id_prefix', metavar='ID', nargs='*', default=None,", "'manage add-on cmdsh for your workspace deployments' addon_cmdsh_parser = subparsers.add_parser('addon-cmdsh',", "in local configuration.')) return config, None, 1 if isinstance(id_prefix, list):", "print('ERROR: ambiguous command: more than 1 workspace deployment defined.') return", "is only 1 workspace deployment')) attach_camera_parser.add_argument('--width-height', metavar='W,H', type=str, dest='attach_camera_res', default=None,", "'json': print(json.dumps(config)) elif output_format == 'yaml': print(yaml.dump(config, 
default_flow_style=False)) else: if", "if 'date_dissolved' in res: print('\\tdissolved: {}'.format(res['date_dissolved'])) elif argv_parsed.command == 'dissolve':", "instance') return 1 else: if ac is None: print('cannot terminate", "1 try: ac.terminate(config['wdeployments'][index]['id']) except FileNotFoundError: print('ERROR: cannot reach daemon. Does", "= get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc normalized_path =", "loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s) (pid: {});' ' %(asctime)s ; %(message)s' .format(os.getpid())))", "get_config_with_index(argv_parsed.id_prefix) if rc != 0: print('The given ID does not", "(pid: {});' ' %(asctime)s ; %(message)s' .format(os.getpid()))) pkglogger.addHandler(loghandler) if argv", "elif id_prefix: index = find_wd(config, id_prefix) if index is None:", "the supporting drivers')) attach_camera_parser.add_argument('--crop', metavar='CROPCONFIG', type=str, dest='attach_camera_crop_config', default=None, help=('image crop", ".addons import add_cmdsh, rm_cmdsh, add_vnc, rm_vnc, add_mistyproxy, rm_mistyproxy def get_config_with_index(id_prefix=None):", "command to be executed inside container') config_parser.add_argument('--rm-init-inside', action='store_true', default=False, dest='rm_init_inside',", "messages about actions by the hardshare client', dest='verbose') argparser.add_argument('--format', metavar='FORMAT',", "1 elif argv_parsed.command == 'addon-mistyproxy': if ac is None: print('cannot", "a rule explicitly permits it.', ] if output_format == 'json':", "to dissolve {}? This action cannot be undone. 
' '[y/N]", "check_parser = subparsers.add_parser('check', description=check_commanddesc, help=check_commanddesc) check_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id", "cmdsh', dest='rm_addon_cmdsh') addon_vnc_commanddesc = 'manage add-on vnc for your workspace", "`proxy`') return 1 config['wdeployments'][index]['image'] = argv_parsed.cprovider_img modify_local(config) elif argv_parsed.add_terminate_prog is", "example, `hardshare config -l`') print('or to get a help message,", ")) if wdeployment['cprovider'] in ['docker', 'podman']: print('\\timg: {}'.format(wdeployment['image'])) if wdeployment['terminate']:", "directory that' ' are not valid; to get list of'", "selected_cprovider = argv_parsed.cprovider.lower() if selected_cprovider not in ['docker', 'podman', 'proxy']:", "is None: config['wdeployments'][index]['image'] = 'rerobots/hs-generic' modify_local(config) elif argv_parsed.cprovider_img is not", "workspace deployment')) rules_parser.add_argument('-l', '--list', action='store_true', default=False, dest='list_rules', help='list all rules')", "subparsers.add_parser('stop-cameras', description=stop_cameras_commanddesc, help=stop_cameras_commanddesc) stop_cameras_parser.add_argument('-a', '--all', action='store_true', default=False, help=('stop all attached", "= subparsers.add_parser('config', description=config_commanddesc, help=config_commanddesc) config_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of", "cp_images = subprocess.run([cprovider, 'image', 'exists', argv_parsed.cprovider_img]) if cp_images.returncode != 0:", "argparser.print_help() return 0 if argv_parsed.verbose: pkglogger.setLevel(logging.DEBUG) if argv_parsed.output_format is not", "addon-mistyproxy') return 1 except ValueError as err: print('ERROR: {}'.format(err)) return", "there is only 1 workspace deployment')) addon_mistyproxy_parser.add_argument('--add', action='store_true', default=False, help='add", "type sshtun 
cannot launch.') pkglogger.removeHandler(loghandler) if argv_parsed.become_daemon: if os.fork() !=", "help='add add-on cmdsh to enable terminal access via WebSockets', dest='add_addon_cmdsh')", "return 1 elif argv_parsed.command == 'stop-cameras': local_keys = list_local_keys() if", "argv_parsed.rm_addon_cmdsh: rm_cmdsh(wdeployment_id, tok) else: print('Use `hardshare addon-cmdsh` with a switch.')", "container') config_parser.add_argument('--cprovider', metavar='CPROVIDER', type=str, dest='cprovider', default=None, help='select a container provider:", "help=check_commanddesc) check_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment to", "advertise_parser.add_argument('-d', '--daemon', action='store_true', default=False, help='detach from invoking terminal (i.e., run", "' 'copy-and-paste value shown in `hardshare config -l` here')) config_parser.add_argument('--add-key',", "a switch. For example, `hardshare rules -l`') print('or to get", "' 'including to you (the owner)')) rules_parser.add_argument('--permit-all', action='store_true', default=False, dest='add_rule_permit_all',", "'if you only have one camera, then try 0')) attach_camera_parser.add_argument('id_prefix',", "fp: tok = fp.read().strip() try: stop_cameras(tok, allcam=argv_parsed.all_cameras) except ConnectionError: print('ERROR:", "the hardshare client', dest='verbose') argparser.add_argument('--format', metavar='FORMAT', default=None, type=str, help=('special output", "= argv_parsed.cprovider.lower() if selected_cprovider not in ['docker', 'podman', 'proxy']: print('ERROR:", "(does NOT copy the key)') config_parser.add_argument('--add-raw-device', metavar='PATH', type=str, dest='raw_device_path', default=None,", "res = ac.dissolve_registration(wdid) except: print('Error occurred while contacting remote server", "print('\\tterminate:') for terminate_p in wdeployment['terminate']: print('\\t\\t{}'.format(terminate_p)) print('\\nfound keys:') if 
len(config['local']['keys'])", "= ac.get_access_rules(wdid) except Exception as err: print('{}'.format(err)) return 1 if", "must be one of the following: docker, podman, proxy') return", "!= 0: return rc normalized_path = os.path.abspath(argv_parsed.add_terminate_prog) if not os.path.exists(normalized_path):", "deployment') return config, None, 1 else: if len(config['wdeployments']) > 1:", "that workspace deployment is' ' hosted here. (this only works", "--create`)') return 1 try: print(ac.register_new(at_most_one=argv_parsed.register_at_most_one)) except HSError as err: print('ERROR:", "ac.add_access_rule(wdid) except Exception as err: print('{}'.format(err)) return 1 elif argv_parsed.add_rule_permit_all:", "of local instances and daemon' status_parser = subparsers.add_parser('status', description=status_commanddesc, help=status_commanddesc)", "not os.path.exists(argv_parsed.raw_device_path): print('ERROR: given device file does not exist') return", "to allow HTTP proxy to Misty robots', dest='add_addon_mistyproxy') addon_mistyproxy_parser.add_argument('--ip', metavar='ADDRESS',", "default=None, help='id of workspace deployment to dissolve') status_commanddesc = 'get", "' 'if you only have one camera, then try 0'))", "rm_vnc, add_mistyproxy, rm_mistyproxy def get_config_with_index(id_prefix=None): try: config = get_local_config() except:", "except: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0:", "print('ERROR: given path does not exist') return 1 config['wdeployments'][index]['terminate'].append(normalized_path) modify_local(config)", "with id prefix {}' .format(res['id_prefix'])) elif res['err'] == 'wrong authorization", "elif argv_parsed.command == 'addon-mistyproxy': if ac is None: print('cannot register", "= 'attach camera stream to workspace deployments' attach_camera_parser = subparsers.add_parser('attach-camera',", "wdid, save=True) elif argv_parsed.command == 'config': if argv_parsed.list_config: try: config", "`hardshare 
config -l`') return 1 with open(local_keys[0], 'rt') as fp:", "; %(message)s' .format(os.getpid()))) pkglogger.addHandler(loghandler) if argv is None: argv =", "check_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment to check'", "is no special formatting); ' 'options: YAML , JSON'), dest='output_format')", "except HSError as err: print('ERROR: {}'.format(err)) return 1 except ConnectionError:", "== 'rules': if ac is None: print('no local configuration found.", "default=None, help='add device file to present in container') config_parser.add_argument('--cprovider', metavar='CPROVIDER',", "< 1: print('Width, height must be positive') return 1 else:", "if rc != 0: return rc config['wdeployments'][index]['cprovider'] = selected_cprovider if", "config['ssh_key'] is None: print('WARNING: local configuration does not declare SSH", "!= 0: return rc if 'ssh_key' not in config or", "wdeployment['cargs'], )) if wdeployment['cprovider'] in ['docker', 'podman']: print('\\timg: {}'.format(wdeployment['image'])) if", "== 'status': status_parser.print_help() elif argv_parsed.help_target_command == 'attach-camera': attach_camera_parser.print_help() elif argv_parsed.help_target_command", "is not None: print('\\tdesc: {}'.format(wd['desc'])) print('\\torigin (address) of registration: {}'", "`hardshare rules -l`') print('or to get a help message, enter\\n\\n", "waiting'), dest='force_terminate') help_message_purge = ('if the server indicates that an", "'image', 'inspect', argv_parsed.cprovider_img], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) if cp_images.returncode != 0: print('ERROR:", "for any future use' ' (THIS CANNOT BE UNDONE)') dissolve_parser", "server indicates that an instance is active,' ' but there", "not exist or ' 'has the wrong permissions.'.format( argv_parsed.new_ssh_path, argv_parsed.new_ssh_path", "action='store_true', default=False, dest='list_config', help='list configuration') 
config_parser.add_argument('--local', action='store_true', default=False, dest='only_local_config', help='only", "enable VNC via rerobots.net', dest='add_addon_vnc') addon_vnc_parser.add_argument('--rm', action='store_true', default=False, help='remove add-on", "== 'yaml' print(yaml.dump(findings, default_flow_style=False)) elif argv_parsed.command == 'attach-camera': config, indices,", "if not argv_parsed.become_daemon: print('ERROR: failed to reach server. Are you", "'--create', action='store_true', default=False, dest='create_config', help='if no local configuration is found,", "as fp: tok = fp.read().strip() try: if argv_parsed.add_addon_cmdsh: add_cmdsh(wdeployment_id, tok)", "be positive') return 1 else: width, height = None, None", "deployments' attach_camera_parser = subparsers.add_parser('attach-camera', description=attach_camera_commanddesc, help=attach_camera_commanddesc) attach_camera_parser.add_argument('camera', default=0, type=int, help=('on", "limitations under the License. \"\"\"Command-line interface \"\"\" import argparse import", "'yaml' print(yaml.dump(res, default_flow_style=False)) elif argv_parsed.drop_all_rules or argv_parsed.add_rule_permit_me: try: if argv_parsed.drop_all_rules:", "execute') config_parser.add_argument('--rm-terminate-prog', metavar='PATH', dest='rm_terminate_prog', default=None, help=('remove program from list of", "argv_parsed.verbose: pkglogger.setLevel(logging.DEBUG) if argv_parsed.output_format is not None: output_format = argv_parsed.output_format.lower()", "with this user account') else: print('\\nregistered workspace deployments with this", "argv_parsed.command == 'version': from . 
import __version__ as hardshare_pkg_version print(hardshare_pkg_version)", "with errors:') for err_key_path, err in config['local']['err_keys'].items(): print('\\t {}: {}'.format(err,", "in ['docker', 'podman']: print('unknown cprovider: {}'.format(cprovider)) return 1 findings =", "print('\\t' + '\\n\\t'.join(config['local']['keys'])) if 'err_keys' in config['local'] and len(config['local']['err_keys']) >", "= subparsers.add_parser('status', description=status_commanddesc, help=status_commanddesc) status_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of", "Version 2.0 (the \"License\"); # you may not use this", "default=None, help=('id of workspace deployment on which to attach' '", "governing permissions and # limitations under the License. \"\"\"Command-line interface", "add-on mistyproxy for your workspace deployments' addon_mistyproxy_parser = subparsers.add_parser('addon-mistyproxy', description=addon_mistyproxy_commanddesc,", "' 'default depends on the supporting drivers')) attach_camera_parser.add_argument('--crop', metavar='CROPCONFIG', type=str,", "import get_local_config, add_key, add_ssh_path, list_local_keys from .mgmt import find_wd, modify_local,", "'mark as unavailable; optionally wait for current instance to finish'", "config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--purge not supported for cprovider", "= subparsers.add_parser('attach-camera', description=attach_camera_commanddesc, help=attach_camera_commanddesc) attach_camera_parser.add_argument('camera', default=0, type=int, help=('on Linux, 0", "positive') return 1 else: width, height = None, None if", ", JSON'), dest='output_format') subparsers = argparser.add_subparsers(dest='command') subparsers.add_parser('version', help='print version number", "default=False, help='print verbose messages about actions by the hardshare client',", "1 res['comments'] = [ 'Access is denied unless a rule", "if index is None: print('ERROR: given 
prefix does not match", "{}'.format(cprovider)) return 1 findings = WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][index]) if 'container' in findings:", "is not None: output_format = argv_parsed.output_format.lower() if output_format not in", "cprovider == 'proxy': print('--add-raw-device not supported for cprovider `proxy`') return", "config['wdeployments'][index]['id'] local_keys = list_local_keys() if len(local_keys) < 1: print('No valid", "help=('permit registration of more than 1 wdeployment; ' 'default is", "if ac is None: print('cannot register without initial local configuration.'", "' (can be unique prefix); ' 'this argument is not", "one or it is otherwise in a' ' non-recoverable state,", "valid.') return 1 ui_input = None while ui_input not in", "required ' 'if there is only 1 workspace deployment')) addon_cmdsh_parser.add_argument('--add',", "subparsers.add_parser('register', description=register_commanddesc, help=register_commanddesc) register_parser.add_argument('--permit-more', action='store_false', default=True, dest='register_at_most_one', help=('permit registration of", "authorization token': print('wrong API token. Did it expire?') else: print(res['err'])", "or {} does not exist or ' 'has the wrong", "a last resort. 
First, try `hardshare' ' terminate` without --purge.')", "' 'if there is only 1 workspace deployment')) rules_parser.add_argument('-l', '--list',", "in local configuration:') if len(config['local']['wdeployments']) == 0: print('\\t(none)') else: for", "by applicable law or agreed to in writing, software #", "os.path.exists(argv_parsed.raw_device_path): print('ERROR: given device file does not exist') return 1", "device file to present in container') config_parser.add_argument('--cprovider', metavar='CPROVIDER', type=str, dest='cprovider',", "images; ' 'default depends on the supporting drivers')) attach_camera_parser.add_argument('--crop', metavar='CROPCONFIG',", "terminate without valid API client') return 1 try: ac.terminate(config['wdeployments'][index]['id']) except", "= None while ui_input not in ('y', 'yes'): print(('Do you", "availability, accept new instances' advertise_parser = subparsers.add_parser('ad', description=advertise_commanddesc, help=advertise_commanddesc) advertise_parser.add_argument('id_prefix',", "1 with open(local_keys[0], 'rt') as fp: tok = fp.read().strip() if", "if not argv_parsed.only_local_config: # Try to get remote config, given", "dest='add_addon_mistyproxy') addon_mistyproxy_parser.add_argument('--ip', metavar='ADDRESS', default=None, help='IP address of the Misty robot',", "return 1 elif argv_parsed.command == 'addon-vnc': if ac is None:", "dest='drop_all_rules', help=('remove all access rules; ' 'note that access is", "workspace deployment')) addon_mistyproxy_parser.add_argument('--add', action='store_true', default=False, help='add add-on mistyproxy to allow", "marked for inclusion in container') config_parser.add_argument('--add-init-inside', metavar='CMD', type=str, dest='add_init_inside', default=None,", "terminate_p in wdeployment['terminate']: print('\\t\\t{}'.format(terminate_p)) print('\\nfound keys:') if len(config['local']['keys']) == 0:", "print('deleting {}...'.format(err_key_path)) os.unlink(err_key_path) elif 
argv_parsed.new_api_token: try: add_key(argv_parsed.new_api_token) except: print('failed to", "elif argv_parsed.help_target_command == 'rules': rules_parser.print_help() elif argv_parsed.help_target_command == 'register': register_parser.print_help()", "ui_input = input().lower() if ui_input in ('n', 'no', ''): return", "this ' 'user account, whether or not started on this", "not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc !=", "but there is not one or it is otherwise in", "configuration found. (try `hardshare config -h`)') return 1 try: res", "1 else: if len(config['wdeployments']) > 1: print('ERROR: ambiguous command: more", ".format(os.getpid()))) pkglogger.addHandler(loghandler) if argv is None: argv = sys.argv[1:] argparser", "0: return 0 os.close(0) os.close(1) os.close(2) try: camera_main(wdeployments, tok=tok, dev=argv_parsed.camera,", "1 wdeployment; ' 'default is to fail if local configuration", "help=addon_vnc_commanddesc) addon_vnc_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment' '", "subparsers.add_parser('addon-mistyproxy', description=addon_mistyproxy_commanddesc, help=addon_mistyproxy_commanddesc) addon_mistyproxy_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace", "add_key(argv_parsed.new_api_token) except: print('failed to add key') return 1 elif argv_parsed.new_ssh_path:", "!= 0: return 0 os.close(0) os.close(1) os.close(2) try: camera_main(wdeployments, tok=tok,", "if rc != 0: return rc if 'ssh_key' not in", "rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc wdeployment_id", "than 1 wdeployment; ' 'default is to fail if local", "= None, None if argv_parsed.attach_camera_crop_config: crop = json.loads(argv_parsed.attach_camera_crop_config) else: crop", "import Error as HSError from .addons import camera_main, stop_cameras from", "it' ' has been previously registered under' ' the same", "in 
errored_keys.items(): print('deleting {}...'.format(err_key_path)) os.unlink(err_key_path) elif argv_parsed.new_api_token: try: add_key(argv_parsed.new_api_token) except:", "deployment') return config, None, 1 indices.append(index) index = indices elif", "only get local information') return 1 config = { 'local':", "1: print('ERROR: ambiguous command: more than 1 workspace deployment defined.')", "None, 1 else: if len(config['wdeployments']) > 1: print('ERROR: ambiguous command:", "ac is not None remote_config = ac.get_remote_config(include_dissolved=argv_parsed.include_dissolved) except HSError as", "camera_main, stop_cameras from .addons import add_cmdsh, rm_cmdsh, add_vnc, rm_vnc, add_mistyproxy,", "a container provider: docker, podman, proxy') config_parser.add_argument('--assign-image', metavar='IMG', type=str, dest='cprovider_img',", "ui_input not in ('y', 'yes'): print(('Do you want to dissolve", "local configuration, if present rm_wd(get_local_config(), wdid, save=True) elif argv_parsed.command ==", "elif argv_parsed.new_api_token: try: add_key(argv_parsed.new_api_token) except: print('failed to add key') return", "config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--add-raw-device not supported for cprovider", "print('wrong API token. 
Did it expire?') else: print(config['remote']['err']) return 1", "elif argv_parsed.help_target_command == 'attach-camera': attach_camera_parser.print_help() elif argv_parsed.help_target_command == 'stop-cameras': stop_cameras_parser.print_help()", "'get status of local instances and daemon' status_parser = subparsers.add_parser('status',", "= fp.read().strip() if argv_parsed.attach_camera_res: width, height = [int(x) for x", "try: config = get_local_config(create_if_empty=argv_parsed.create_config, collect_errors=True) except: print('error loading configuration data.'", "valid API client') return 1 try: ac.terminate(config['wdeployments'][index]['id']) except FileNotFoundError: print('ERROR:", "return 1 config['wdeployments'][index]['init_inside'].append(argv_parsed.add_init_inside) modify_local(config) elif argv_parsed.rm_init_inside: config, index, rc =", "argv_parsed.targetaddr) elif argv_parsed.rm_addon_mistyproxy: rm_mistyproxy(wdeployment_id, tok) else: print('Use `hardshare addon-mistyproxy` with", "here. 
(this only works if it' ' has been previously", "attach_camera_parser.add_argument('-d', '--daemon', action='store_true', default=False, help='detach from invoking terminal (i.e., run", "user account') else: print('\\nregistered workspace deployments with this user account:')", "hardshare help rules') return 1 elif argv_parsed.command == 'check': if", "loghandler = logging.FileHandler(filename=logfname, mode='a', delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s (%(levelname)s) (pid: {});'", "if output_format == 'json': print(json.dumps(res)) else: # output_format == 'yaml'", "dest='include_dissolved', help='include configuration data of dissolved workspace deployments') config_parser.add_argument('--declare', metavar='ID',", "find_wd(config, argv_parsed.id_prefix, one_or_none=False): findings.append(WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][m])) if output_format == 'json': print(json.dumps(findings)) else:", "subparsers.add_parser('addon-cmdsh', description=addon_cmdsh_commanddesc, help=addon_cmdsh_commanddesc) addon_cmdsh_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace", "config['wdeployments'][index]['cprovider'] if cprovider not in ['docker', 'podman', 'proxy']: print('unknown cprovider:", "dissolve_parser.add_argument('wdid', metavar='ID', nargs='?', default=None, help='id of workspace deployment to dissolve')", "is None: print('no local configuration found. 
(try `hardshare config -h`)')", "if wd['dissolved']: print('\\tdissolved: {}'.format(wd['dissolved'])) elif argv_parsed.prune_err_keys: _, errored_keys = list_local_keys(collect_errors=True)", "applicable law or agreed to in writing, software # distributed", "pair (does NOT copy the key)') config_parser.add_argument('--add-raw-device', metavar='PATH', type=str, dest='raw_device_path',", "there is an active instance, then' ' stop it without", "config: ref = config['local']['wdeployments'] else: ref = config['wdeployments'] for jj,", "elif cprovider == 'docker': cp_images = subprocess.run([cprovider, 'image', 'inspect', argv_parsed.cprovider_img],", "output_format == 'json': print(json.dumps(findings)) else: # output_format == 'yaml' print(yaml.dump(findings,", "files in local key directory that' ' are not valid;", "try: subprocess.check_call([cprovider, 'rm', '-f', findings['container']['name']], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) except: print('failed to", "0')) attach_camera_parser.add_argument('id_prefix', metavar='ID', nargs='*', default=None, help=('id of workspace deployment on", "default=False, dest='add_rule_permit_me', help='permit instantiations by you (the owner)') rules_parser.add_argument('--drop-all', action='store_true',", "it exist? 
is it broken?') return 1 if not argv_parsed.only_local_config:", "wdeployment['cprovider'], wdeployment['cargs'], )) if wdeployment['cprovider'] in ['docker', 'podman']: print('\\timg: {}'.format(wdeployment['image']))", "None if argv_parsed.attach_camera_crop_config: crop = json.loads(argv_parsed.attach_camera_crop_config) else: crop = None", "1 try: ac.add_access_rule(wdid, to_user='*') except Exception as err: print('{}'.format(err)) return", "else: print('Use `hardshare addon-cmdsh` with a switch.') print('To get a", "one camera, then try 0')) attach_camera_parser.add_argument('id_prefix', metavar='ID', nargs='*', default=None, help=('id", "interface \"\"\" import argparse import json import logging import logging.handlers", "print('error loading configuration data. does it exist?') return None, None,", "failed to reach server. Are you connected to the Internet?')", "if argv_parsed.command == 'status': try: config = get_local_config() except: print('error", "cprovider == 'proxy': print('--purge not supported for cprovider `proxy`') return", "cprovider = config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--add-init-inside not supported", "subparsers = argparser.add_subparsers(dest='command') subparsers.add_parser('version', help='print version number and exit.') help_parser", "local_keys = list_local_keys() if len(local_keys) < 1: print('No valid keys", "print(yaml.dump(res, default_flow_style=False)) elif argv_parsed.drop_all_rules or argv_parsed.add_rule_permit_me: try: if argv_parsed.drop_all_rules: ac.drop_access_rules(wdid)", "dest='register_at_most_one', help=('permit registration of more than 1 wdeployment; ' 'default", "configuration found. 
(try `hardshare config -h`)') return 1 try: wdid", "access is denied by default, ' 'including to you (the", "= config['wdeployments'][index]['cprovider'] if cprovider not in ['docker', 'podman', 'proxy']: print('unknown", "action='store_true', default=False, dest='only_local_config', help='only show local configuration data') config_parser.add_argument('--include-dissolved', action='store_true',", "rules (also known as capabilities or permissions)' rules_parser = subparsers.add_parser('rules',", "help='include configuration data of dissolved workspace deployments') config_parser.add_argument('--declare', metavar='ID', dest='declared_wdeployment_id',", "while contacting remote server.') if config['remote']['err'] == 'wrong authorization token':", "rerobots, Inc. # # Licensed under the Apache License, Version", "to add key') return 1 elif argv_parsed.new_ssh_path: try: add_ssh_path(argv_parsed.new_ssh_path) except:", "is None: wdid = None else: try: wdid = str(uuid.UUID(argv_parsed.id_prefix))", "== 'proxy': print('--add-init-inside not supported for cprovider `proxy`') return 1", "= config['wdeployments'][index]['cprovider'] if cprovider == 'proxy': print('--rm-init-inside not supported for", "== 'yaml' print(yaml.dump(res, default_flow_style=False)) elif argv_parsed.drop_all_rules or argv_parsed.add_rule_permit_me: try: if", "cmdsh for your workspace deployments' addon_cmdsh_parser = subparsers.add_parser('addon-cmdsh', description=addon_cmdsh_commanddesc, help=addon_cmdsh_commanddesc)", "carg = '--device={D}:{D}'.format(D=argv_parsed.raw_device_path) config['wdeployments'][index]['cargs'].append(carg) modify_local(config) elif argv_parsed.remove_raw_device_path is not None:", "the same user account.)')) rules_commanddesc = 'modify access rules (also", "podman, proxy') return 1 config, index, rc = get_config_with_index(argv_parsed.id_prefix) if", "by attach-camera' stop_cameras_parser = subparsers.add_parser('stop-cameras', description=stop_cameras_commanddesc, 
help=stop_cameras_commanddesc) stop_cameras_parser.add_argument('-a', '--all', action='store_true',", "os.close(2) else: pkglogger.addHandler(logging.StreamHandler()) logfname = 'hardshare_client.{}.log'.format(config['wdeployments'][index]['id']) loghandler = logging.FileHandler(filename=logfname, mode='a',", "== 'ad': if ac is None: print('cannot register without initial", "argv_parsed.add_rule_permit_me: ac.add_access_rule(wdid) except Exception as err: print('{}'.format(err)) return 1 elif", "!= 0: return rc config['wdeployments'][index]['cprovider'] = selected_cprovider if selected_cprovider ==", "return 1 elif argv_parsed.command == 'rules': if ac is None:", "token': print('wrong API token. Did it expire?') else: print(res['err']) return", "cannot reach daemon. Does it exist? (Try `hardshare status`)') return", "elif res['err'] == 'wrong authorization token': print('wrong API token. Did", "= [] modify_local(config) elif argv_parsed.cprovider is not None: selected_cprovider =", "from aiohttp.client_exceptions import ClientConnectorError as ConnectionError from .core import WorkspaceInstance", "str(uuid.UUID(argv_parsed.id_prefix)) except: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if rc !=", "help=('id of workspace deployment to advertise' ' (can be unique", "= list_local_keys() if len(local_keys) < 1: print('No valid keys available.", "if argv_parsed.output_format is not None: output_format = argv_parsed.output_format.lower() if output_format", "# You may obtain a copy of the License at", ".format(wd['origin'])) if wd['dissolved']: print('\\tdissolved: {}'.format(wd['dissolved'])) elif argv_parsed.prune_err_keys: _, errored_keys =", "help=dissolve_commanddesc) dissolve_parser.add_argument('wdid', metavar='ID', nargs='?', default=None, help='id of workspace deployment to", "attempt local clean-up; this' ' command is a last resort.", "attach_camera_parser = subparsers.add_parser('attach-camera', description=attach_camera_commanddesc, 
help=attach_camera_commanddesc) attach_camera_parser.add_argument('camera', default=0, type=int, help=('on Linux,", "res['comments'] = [ 'Access is denied unless a rule explicitly", "contacting remote server ' 'at {}'.format(ac.base_uri)) return 1 if 'err'", "action='store_true', default=False, help='print this help message and exit') argparser.add_argument('-V', '--version',", "workspace deployments' attach_camera_parser = subparsers.add_parser('attach-camera', description=attach_camera_commanddesc, help=attach_camera_commanddesc) attach_camera_parser.add_argument('camera', default=0, type=int,", "as unavailable; optionally wait for current instance to finish' terminate_parser", "indices, rc = get_config_with_index(argv_parsed.id_prefix) if rc != 0: return rc", "config_parser.add_argument('--add-terminate-prog', metavar='PATH', dest='add_terminate_prog', default=None, help='add program to list of commands", "wdeployments = [config['wdeployments'][jj]['id'] for jj in indices] local_keys = list_local_keys()", "fp.read().strip() try: if argv_parsed.add_addon_mistyproxy: if argv_parsed.targetaddr is None: print('--ip is", "0: print('found possible keys with errors:') for err_key_path, err in", "`hardshare config -h`)') return 1 try: wdid = str(uuid.UUID(argv_parsed.wdid)) except:", "len(config['local']['wdeployments']) == 0: print('\\t(none)') else: for wdeployment in config['local']['wdeployments']: print('{}\\n\\turl:", "config or config['ssh_key'] is None: print('WARNING: local configuration does not", "found. 
(try `hardshare config -h`)') return 1 if argv_parsed.id_prefix is", "stop container `{}`'.format(findings['container']['name'])) return 1 return 0 else: print('failed to", "(also known as capabilities or permissions)' rules_parser = subparsers.add_parser('rules', description=rules_commanddesc,", "tok, argv_parsed.targetaddr) elif argv_parsed.rm_addon_mistyproxy: rm_mistyproxy(wdeployment_id, tok) else: print('Use `hardshare addon-mistyproxy`", "return rc config['wdeployments'][index]['cprovider'] = selected_cprovider if selected_cprovider == 'proxy': config['wdeployments'][index]['image']", "denied unless a rule explicitly permits it.', ] if output_format", "config = get_local_config() except: print('error loading configuration data. does it", "stop_cameras(tok, allcam=argv_parsed.all_cameras) except ConnectionError: print('ERROR: failed to reach server. Are", "dest='list_rules', help='list all rules') rules_parser.add_argument('--permit-me', action='store_true', default=False, dest='add_rule_permit_me', help='permit instantiations", "been previously registered under' ' the same user account.)')) rules_commanddesc", "attach_camera_parser.print_help() elif argv_parsed.help_target_command == 'stop-cameras': stop_cameras_parser.print_help() elif argv_parsed.help_target_command == 'addon-cmdsh':", "0: findings = [WorkspaceInstance.inspect_instance()] else: findings = [] for wd", "except Exception as err: print('{}'.format(err)) return 1 else: print('Use `hardshare", "(can be unique prefix)')) dissolve_commanddesc = ('dissolve this workspace deployment,", "on this host'), dest='all_cameras') addon_cmdsh_commanddesc = 'manage add-on cmdsh for", "and len(config['local']['err_keys']) > 0: print('found possible keys with errors:') for", "it exist? 
(Try `hardshare status`)') return 1 return 0 elif", "mistyproxy', dest='rm_addon_mistyproxy') terminate_commanddesc = 'mark as unavailable; optionally wait for", "current instance to finish' terminate_parser = subparsers.add_parser('stop-ad', description=terminate_commanddesc, help=terminate_commanddesc) terminate_parser.add_argument('id_prefix',", "== 'not found': print('not found: workspace deployment with id prefix", "logfname = 'hardshare_client.{}.log'.format(config['wdeployments'][index]['id']) loghandler = logging.FileHandler(filename=logfname, mode='a', delay=True) loghandler.setLevel(logging.DEBUG) loghandler.setFormatter(logging.Formatter('%(name)s.%(funcName)s", "print('ERROR: {}'.format(err)) return 1 except ConnectionError: print('ERROR: failed to reach", "as err: print('{}'.format(err)) return 1 if 'err' in res: if", "1 elif argv_parsed.new_ssh_path: try: add_ssh_path(argv_parsed.new_ssh_path) except: print('ERROR: {} or {}", "dest='only_local_config', help='only show local configuration data') config_parser.add_argument('--include-dissolved', action='store_true', default=False, dest='include_dissolved',", "otherwise in a' ' non-recoverable state, then mark it remotely", "= 'rerobots/hs-generic' modify_local(config) elif argv_parsed.cprovider_img is not None: config, index,", "return 1 if 'err' in res: if res['err'] == 'not", "'local': config, 'remote': remote_config, } if 'local' in config: ref", "keys:') if len(config['local']['keys']) == 0: print('\\t(none)') else: print('\\t' + '\\n\\t'.join(config['local']['keys']))", "finish' terminate_parser = subparsers.add_parser('stop-ad', description=terminate_commanddesc, help=terminate_commanddesc) terminate_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None,", "print('failed to add key') return 1 elif argv_parsed.new_ssh_path: try: add_ssh_path(argv_parsed.new_ssh_path)", "argv_parsed.help_target_command == 'check': check_parser.print_help() elif argv_parsed.help_target_command == 
'dissolve': dissolve_parser.print_help() elif", "stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) except: print('failed to stop container `{}`'.format(findings['container']['name'])) return 1", "required ' 'if there is only 1 workspace deployment')) attach_camera_parser.add_argument('--width-height',", "= argparser.parse_args(argv) if argv_parsed.print_version or argv_parsed.command == 'version': from .", "elif argv_parsed.help_target_command == 'register': register_parser.print_help() elif argv_parsed.help_target_command == 'check': check_parser.print_help()", "' 'user account, whether or not started on this host'),", "default=False, help='remove add-on cmdsh', dest='rm_addon_cmdsh') addon_vnc_commanddesc = 'manage add-on vnc", "except: print('error loading configuration data.' ' does it exist? is", "workspace deployment' check_parser = subparsers.add_parser('check', description=check_commanddesc, help=check_commanddesc) check_parser.add_argument('id_prefix', metavar='ID', nargs='?',", "hardshare (this) package.', dest='print_version') argparser.add_argument('-v', '--verbose', action='store_true', default=False, help='print verbose", "else: # selected_cprovider \\in {docker, podman} if config['wdeployments'][index]['image'] is None:", "elif argv_parsed.command == 'ad': if ac is None: print('cannot register", "return None, None, 1 if len(config['wdeployments']) == 0: print(('ERROR: no", "required ' 'if there is only 1 workspace deployment')) addon_vnc_parser.add_argument('--add',", "dest='output_format') subparsers = argparser.add_subparsers(dest='command') subparsers.add_parser('version', help='print version number and exit.')", "data. 
does it exist?') return 1 if argv_parsed.id_prefix is None:", "wdid = config['wdeployments'][index]['id'] if argv_parsed.list_rules: try: res = ac.get_access_rules(wdid) except", "= list_local_keys(collect_errors=True) for err_key_path, err in errored_keys.items(): print('deleting {}...'.format(err_key_path)) os.unlink(err_key_path)", "if rc != 0: return rc carg = '--device={D}:{D}'.format(D=argv_parsed.remove_raw_device_path) config['wdeployments'][index]['cargs'].remove(carg)", "wdeployment['cprovider'] in ['docker', 'podman']: print('\\timg: {}'.format(wdeployment['image'])) if wdeployment['terminate']: print('\\tterminate:') for", "in find_wd(config, argv_parsed.id_prefix, one_or_none=False): findings.append(WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][m])) if output_format == 'json': print(json.dumps(findings))", "= subparsers.add_parser('register', description=register_commanddesc, help=register_commanddesc) register_parser.add_argument('--permit-more', action='store_false', default=True, dest='register_at_most_one', help=('permit registration", "This action cannot be undone. 
' '[y/N] ').format(wdid), end='') ui_input", "addon_cmdsh_parser.print_help() elif argv_parsed.help_target_command == 'addon-vnc': addon_vnc_parser.print_help() elif argv_parsed.help_target_command == 'addon-mistyproxy':", "inside container') config_parser.add_argument('--rm-init-inside', action='store_true', default=False, dest='rm_init_inside', help='remove (empty) list of", "argv_parsed.new_api_token: try: add_key(argv_parsed.new_api_token) except: print('failed to add key') return 1", "hardshare_pkg_version print(hardshare_pkg_version) return 0 elif argv_parsed.command is None or argv_parsed.command", "rc carg = '--device={D}:{D}'.format(D=argv_parsed.remove_raw_device_path) config['wdeployments'][index]['cargs'].remove(carg) modify_local(config) elif argv_parsed.add_init_inside is not", "help='select a container provider: docker, podman, proxy') config_parser.add_argument('--assign-image', metavar='IMG', type=str,", "elif argv_parsed.command == 'addon-vnc': if ac is None: print('cannot register", "1 findings = WorkspaceInstance.inspect_instance(wdeployment=config['wdeployments'][index]) if 'container' in findings: try: subprocess.check_call([cprovider,", "metavar='ID', nargs='?', default=None, help=('id of workspace deployment to check' '", "\"License\"); # you may not use this file except in", "'if there is only 1 workspace deployment')) addon_mistyproxy_parser.add_argument('--add', action='store_true', default=False,", "0: if len(config['wdeployments']) > 1: print('ERROR: ambiguous command: more than", "UNDONE)') dissolve_parser = subparsers.add_parser('dissolve', description=dissolve_commanddesc, help=dissolve_commanddesc) dissolve_parser.add_argument('wdid', metavar='ID', nargs='?', default=None,", "print(('ERROR: no workspace deployment in local configuration.')) return config, None,", "must be positive') return 1 else: width, height = None,", "local configuration already ' 'has wdeployment declared')) check_commanddesc = 'check", "terminate_commanddesc = 
'mark as unavailable; optionally wait for current instance", "print('Error occurred while contacting rerobots servers') print('Try config -l --local", "elif argv_parsed.command == 'stop-ad': config, index, rc = get_config_with_index(argv_parsed.id_prefix) if", "config['remote']: print('Error occurred while contacting remote server.') if config['remote']['err'] ==", "shown in `hardshare config -l` here')) config_parser.add_argument('--add-key', metavar='FILE', dest='new_api_token', help='add", "python # Copyright (C) 2018 rerobots, Inc. # # Licensed", "addon_vnc_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of workspace deployment' ' (can", "import WorkspaceInstance from .mgmt import get_local_config, add_key, add_ssh_path, list_local_keys from", "prefix)')) advertise_commanddesc = 'advertise availability, accept new instances' advertise_parser =", "argv_parsed.cprovider_img is not None: config, index, rc = get_config_with_index(argv_parsed.id_prefix) if", "create one') config_parser.add_argument('--add-terminate-prog', metavar='PATH', dest='add_terminate_prog', default=None, help='add program to list", "default=None, help=('width and height of captured images; ' 'default depends", "print('To get a help message, enter\\n\\n hardshare help addon-cmdsh') return", "is not one or it is otherwise in a' '", "'help': if hasattr(argv_parsed, 'help_target_command') and argv_parsed.help_target_command is not None: if", "from .err import Error as HSError from .addons import camera_main,", "= subparsers.add_parser('addon-vnc', description=addon_vnc_commanddesc, help=addon_vnc_commanddesc) addon_vnc_parser.add_argument('id_prefix', metavar='ID', nargs='?', default=None, help=('id of", "in indices] local_keys = list_local_keys() if len(local_keys) < 1: print('No", "= subparsers.add_parser('dissolve', description=dissolve_commanddesc, help=dissolve_commanddesc) dissolve_parser.add_argument('wdid', metavar='ID', nargs='?', default=None, help='id 
of", "as capabilities or permissions)' rules_parser = subparsers.add_parser('rules', description=rules_commanddesc, help=rules_commanddesc) rules_parser.add_argument('id_prefix',", "res: print('\\tdissolved: {}'.format(res['date_dissolved'])) elif argv_parsed.command == 'dissolve': if ac is", "key.\\n' 'Instances with connection type sshtun cannot launch.') pkglogger.removeHandler(loghandler) if", "!= 0: return rc cprovider = config['wdeployments'][index]['cprovider'] if cprovider not", "config_commanddesc = 'manage local and remote configuration' config_parser = subparsers.add_parser('config',", "by anyone') register_commanddesc = 'register new workspace deployment' register_parser =", "' hosted here. (this only works if it' ' has", "_, errored_keys = list_local_keys(collect_errors=True) for err_key_path, err in errored_keys.items(): print('deleting", "dest='attach_camera_res', default=None, help=('width and height of captured images; ' 'default", "there is only 1 workspace deployment')) addon_cmdsh_parser.add_argument('--add', action='store_true', default=False, help='add", "metavar='ID', dest='declared_wdeployment_id', default=None, help=('declare that workspace deployment is' ' hosted", "(the owner)')) rules_parser.add_argument('--permit-all', action='store_true', default=False, dest='add_rule_permit_all', help='permit instantiations by anyone')", "typically implies /dev/video0; ' 'if you only have one camera,", "'status': try: config = get_local_config() except: print('error loading configuration data.", "if 'err' in config['remote']: print('Error occurred while contacting remote server.')", "= 'manage add-on cmdsh for your workspace deployments' addon_cmdsh_parser =" ]
[ "Solution: def XXX(self, x: int) -> int: def solve(x): a", ">= -(2**31)): return d else: return 0 if x>=0: return", "val for i, v in p.items(): d += v*(10**i) if", "{} d=0 for ind, val in enumerate(a): p[ind] = val", "v*(10**i) if (2**31 - 1>= d >= -(2**31)): return d", "-> int: def solve(x): a = list(map(int,str(x))) p = {}", "else: return 0 if x>=0: return (solve(x)) if x<0: x", "(2**31 - 1>= d >= -(2**31)): return d else: return", "= val for i, v in p.items(): d += v*(10**i)", "p.items(): d += v*(10**i) if (2**31 - 1>= d >=", "1>= d >= -(2**31)): return d else: return 0 if", "= {} d=0 for ind, val in enumerate(a): p[ind] =", "enumerate(a): p[ind] = val for i, v in p.items(): d", "def solve(x): a = list(map(int,str(x))) p = {} d=0 for", "def XXX(self, x: int) -> int: def solve(x): a =", "v in p.items(): d += v*(10**i) if (2**31 - 1>=", "= list(map(int,str(x))) p = {} d=0 for ind, val in", "p[ind] = val for i, v in p.items(): d +=", "<reponame>kkcookies99/UAST class Solution: def XXX(self, x: int) -> int: def", "d else: return 0 if x>=0: return (solve(x)) if x<0:", "if x>=0: return (solve(x)) if x<0: x = -x return", "in enumerate(a): p[ind] = val for i, v in p.items():", "+= v*(10**i) if (2**31 - 1>= d >= -(2**31)): return", "return 0 if x>=0: return (solve(x)) if x<0: x =", "ind, val in enumerate(a): p[ind] = val for i, v", "i, v in p.items(): d += v*(10**i) if (2**31 -", "if (2**31 - 1>= d >= -(2**31)): return d else:", "int: def solve(x): a = list(map(int,str(x))) p = {} d=0", "class Solution: def XXX(self, x: int) -> int: def solve(x):", "list(map(int,str(x))) p = {} d=0 for ind, val in enumerate(a):", "x: int) -> int: def solve(x): a = list(map(int,str(x))) p", "val in enumerate(a): p[ind] = val for i, v in", "for i, v in p.items(): d += v*(10**i) if (2**31", "d >= -(2**31)): return d else: return 0 if x>=0:", "0 if x>=0: return (solve(x)) if x<0: x = -x", "x>=0: return (solve(x)) if x<0: x = -x return (-solve(x))", "d=0 for ind, val in enumerate(a): p[ind] 
= val for", "XXX(self, x: int) -> int: def solve(x): a = list(map(int,str(x)))", "p = {} d=0 for ind, val in enumerate(a): p[ind]", "d += v*(10**i) if (2**31 - 1>= d >= -(2**31)):", "- 1>= d >= -(2**31)): return d else: return 0", "return d else: return 0 if x>=0: return (solve(x)) if", "solve(x): a = list(map(int,str(x))) p = {} d=0 for ind,", "int) -> int: def solve(x): a = list(map(int,str(x))) p =", "for ind, val in enumerate(a): p[ind] = val for i,", "in p.items(): d += v*(10**i) if (2**31 - 1>= d", "-(2**31)): return d else: return 0 if x>=0: return (solve(x))", "a = list(map(int,str(x))) p = {} d=0 for ind, val" ]
[ "'city':city, 'state':state, 'photos': photos, 'walk_score': walk_score} rental_list.append(element) return rental_list @router.get('/for_rent_list')", "for_rent_list(api_key = config.settings.api_key, city: str = \"New York City\", state:", "state= \"NY\", limit = 4): url = os.getenv('url_list_for_sale') querystring =", "for i in range(limit): line = response[i]['address']['line'] city = response[i]['address']['city']", "city: str = \"New York City\", state: str= \"NY\", prop_type:", "'walk_score': walk_score} rental_list.append(element) return rental_list @router.get('/for_rent_list') async def for_rent_list(api_key =", "address = line +\" \"+ city + \" \"+ state", "Depends import sqlalchemy from pydantic import BaseModel, SecretStr from app", "import * load_dotenv() router = APIRouter() headers = {'x-rapidapi-key': os.getenv('api_key'),", "\"prop_type\": prop_type} response_for_rent = requests.request(\"GET\", url, params = querystring, headers", "url = os.getenv('url_property_detail') querystring = {\"property_id\":property_id} response_prop_detail = requests.request(\"GET\", url,", "import BaseModel, SecretStr from app import config from app.walk_score import", "city + \" \"+ state walk_score = just_walk_score(address, lat, lon)", "lat = response[i]['address']['lat'] lon = response[i]['address']['lon'] photos = response[i]['photos'] address", "state = response[i]['address']['state'] lat = response[i]['address']['lat'] lon = response[i]['address']['lon'] photos", "= requests.request(\"GET\", url, headers=headers, params=querystring) return response_prop_detail.json()['properties'] @router.get('/for_sale_list') async def", "= \"O3599084026\"): \"\"\" Parameters: property_id Returns: detailed information about the", "response[i]['photos'] address = line +\" \"+ city + \" \"+", "= os.getenv('url_list_for_rent') querystring = {\"city\": city, \"state_code\": state, \"limit\": limit,", "\"\"\"Realty Info\"\"\" import os import requests from dotenv import 
load_dotenv", "{'x-rapidapi-key': os.getenv('api_key'), 'x-rapidapi-host': os.getenv('host') } @router.get('/streamlined_rent_list') async def streamlined_rent_list(api_key =", "return response_for_rent.json()['properties'] @router.get('/for_rent_list/{property_id}') async def property_detail(property_id: str = \"O3599084026\"): \"\"\"", "url, headers=headers, params=querystring) return response_prop_detail.json()['properties'] @router.get('/for_sale_list') async def for_sale_list(api_key =", "async def for_sale_list(api_key = config.settings.api_key, city = \"New York City\",", "url, params = querystring, headers = headers,) response = response_for_rent.json()['properties']", "= \"New York City\", state= \"NY\", limit = 4): url", "'lat': lat, 'lon': lon, 'city':city, 'state':state, 'photos': photos, 'walk_score': walk_score}", "response_for_rent = requests.request(\"GET\", url, params = querystring, headers = headers,)", "response_for_rent.json()['properties'] @router.get('/for_rent_list/{property_id}') async def property_detail(property_id: str = \"O3599084026\"): \"\"\" Parameters:", "} @router.get('/streamlined_rent_list') async def streamlined_rent_list(api_key = config.settings.api_key, city: str =", "requests.request(\"GET\", url, headers=headers, params=querystring) return response_prop_detail.json()['properties'] @router.get('/for_sale_list') async def for_sale_list(api_key", "photos, 'walk_score': walk_score} rental_list.append(element) return rental_list @router.get('/for_rent_list') async def for_rent_list(api_key", "walk_score} rental_list.append(element) return rental_list @router.get('/for_rent_list') async def for_rent_list(api_key = config.settings.api_key,", "Parameters: property_id Returns: detailed information about the property \"\"\" url", "{\"city\": city ,\"limit\": limit,\"offset\":\"0\",\"state_code\": state,\"sort\":\"relevance\"} response_for_sale = requests.request(\"GET\", url, headers=headers,", "= [] for i in range(limit): line = 
response[i]['address']['line'] city", "fastapi import APIRouter, Depends import sqlalchemy from pydantic import BaseModel,", "+\" \"+ city + \" \"+ state walk_score = just_walk_score(address,", "limit: int number of results to populate Returns: information about", "requests.request(\"GET\", url, params = querystring, headers = headers,) return response_for_rent.json()['properties']", "SecretStr from app import config from app.walk_score import * load_dotenv()", "city: str state: str prop_type: str ('condo', 'single_family', 'multi_family') limit:", "state: str prop_type: str ('condo', 'single_family', 'multi_family') limit: int number", "limit,\"offset\":\"0\",\"state_code\": state,\"sort\":\"relevance\"} response_for_sale = requests.request(\"GET\", url, headers=headers, params=querystring) return response_for_sale.json()['properties']", "+ \" \"+ state walk_score = just_walk_score(address, lat, lon) element", "property_detail(property_id: str = \"O3599084026\"): \"\"\" Parameters: property_id Returns: detailed information", "= headers,) response = response_for_rent.json()['properties'] rental_list = [] for i", "from app import config from app.walk_score import * load_dotenv() router", "'single_family', 'multi_family') limit: int number of results to populate Returns:", "\"NY\", limit = 4): url = os.getenv('url_list_for_sale') querystring = {\"city\":", "str ('condo', 'single_family', 'multi_family') limit: int number of results to", "response[i]['address']['line'] city = response[i]['address']['city'] state = response[i]['address']['state'] lat = response[i]['address']['lat']", "def for_sale_list(api_key = config.settings.api_key, city = \"New York City\", state=", "information about properties for rent \"\"\" url = os.getenv('url_list_for_rent') querystring", "router = APIRouter() headers = {'x-rapidapi-key': os.getenv('api_key'), 'x-rapidapi-host': os.getenv('host') }", "= just_walk_score(address, lat, lon) element = {'address': address, 'lat': lat,", "= \"New 
York City\", state: str= \"NY\", prop_type: str =", "detailed information about the property \"\"\" url = os.getenv('url_property_detail') querystring", "@router.get('/streamlined_rent_list') async def streamlined_rent_list(api_key = config.settings.api_key, city: str = \"New", "= \"condo\", limit: int = 4): \"\"\" Parameters: api_key city:", ",\"limit\": limit,\"offset\":\"0\",\"state_code\": state,\"sort\":\"relevance\"} response_for_sale = requests.request(\"GET\", url, headers=headers, params=querystring) return", "\"New York City\", state: str= \"NY\", prop_type: str = \"condo\",", "= {\"city\": city, \"state_code\": state, \"limit\": limit, \"offset\": \"0\", \"sort\":\"relevance\",", "= response[i]['address']['state'] lat = response[i]['address']['lat'] lon = response[i]['address']['lon'] photos =", "state walk_score = just_walk_score(address, lat, lon) element = {'address': address,", "= {'address': address, 'lat': lat, 'lon': lon, 'city':city, 'state':state, 'photos':", "line +\" \"+ city + \" \"+ state walk_score =", "city = \"New York City\", state= \"NY\", limit = 4):", "= response[i]['photos'] address = line +\" \"+ city + \"", "querystring = {\"city\": city ,\"limit\": limit,\"offset\":\"0\",\"state_code\": state,\"sort\":\"relevance\"} response_for_sale = requests.request(\"GET\",", "'state':state, 'photos': photos, 'walk_score': walk_score} rental_list.append(element) return rental_list @router.get('/for_rent_list') async", "limit, \"offset\": \"0\", \"sort\":\"relevance\", \"prop_type\": prop_type} response_for_rent = requests.request(\"GET\", url,", "4): url = os.getenv('url_list_for_sale') querystring = {\"city\": city ,\"limit\": limit,\"offset\":\"0\",\"state_code\":", "response_for_rent.json()['properties'] rental_list = [] for i in range(limit): line =", "\"O3599084026\"): \"\"\" Parameters: property_id Returns: detailed information about the property", "@router.get('/for_rent_list') async def for_rent_list(api_key = config.settings.api_key, 
city: str = \"New", "\"+ city + \" \"+ state walk_score = just_walk_score(address, lat,", "state: str= \"NY\", prop_type: str = \"condo\", limit: int =", "\"condo\", limit: int = 4): \"\"\" Parameters: api_key city: str", "url = os.getenv('url_list_for_sale') querystring = {\"city\": city ,\"limit\": limit,\"offset\":\"0\",\"state_code\": state,\"sort\":\"relevance\"}", "= config.settings.api_key, city = \"New York City\", state= \"NY\", limit", "def for_rent_list(api_key = config.settings.api_key, city: str = \"New York City\",", "rental_list = [] for i in range(limit): line = response[i]['address']['line']", "from fastapi import APIRouter, Depends import sqlalchemy from pydantic import", "rental_list.append(element) return rental_list @router.get('/for_rent_list') async def for_rent_list(api_key = config.settings.api_key, city:", "rent \"\"\" url = os.getenv('url_list_for_rent') querystring = {\"city\": city, \"state_code\":", "= response[i]['address']['lat'] lon = response[i]['address']['lon'] photos = response[i]['photos'] address =", "params = querystring, headers = headers,) return response_for_rent.json()['properties'] @router.get('/for_rent_list/{property_id}') async", "= response[i]['address']['line'] city = response[i]['address']['city'] state = response[i]['address']['state'] lat =", "\"\"\" Parameters: property_id Returns: detailed information about the property \"\"\"", "information about the property \"\"\" url = os.getenv('url_property_detail') querystring =", "\"\"\" url = os.getenv('url_property_detail') querystring = {\"property_id\":property_id} response_prop_detail = requests.request(\"GET\",", "def property_detail(property_id: str = \"O3599084026\"): \"\"\" Parameters: property_id Returns: detailed", "4): \"\"\" Parameters: api_key city: str state: str prop_type: str", "\"\"\" Parameters: api_key city: str state: str prop_type: str ('condo',", "\" \"+ state walk_score = just_walk_score(address, lat, lon) element =", "prop_type: str = \"condo\", 
limit: int = 4): \"\"\" Parameters:", "= querystring, headers = headers,) response = response_for_rent.json()['properties'] rental_list =", "= response[i]['address']['city'] state = response[i]['address']['state'] lat = response[i]['address']['lat'] lon =", "for_sale_list(api_key = config.settings.api_key, city = \"New York City\", state= \"NY\",", "York City\", state= \"NY\", limit = 4): url = os.getenv('url_list_for_sale')", "response[i]['address']['lon'] photos = response[i]['photos'] address = line +\" \"+ city", "os.getenv('url_list_for_sale') querystring = {\"city\": city ,\"limit\": limit,\"offset\":\"0\",\"state_code\": state,\"sort\":\"relevance\"} response_for_sale =", "return rental_list @router.get('/for_rent_list') async def for_rent_list(api_key = config.settings.api_key, city: str", "= querystring, headers = headers,) return response_for_rent.json()['properties'] @router.get('/for_rent_list/{property_id}') async def", "headers = {'x-rapidapi-key': os.getenv('api_key'), 'x-rapidapi-host': os.getenv('host') } @router.get('/streamlined_rent_list') async def", "import sqlalchemy from pydantic import BaseModel, SecretStr from app import", "Info\"\"\" import os import requests from dotenv import load_dotenv from", "streamlined_rent_list(api_key = config.settings.api_key, city: str = \"New York City\", state:", "'multi_family') limit: int number of results to populate Returns: information", "lon) element = {'address': address, 'lat': lat, 'lon': lon, 'city':city,", "for rent \"\"\" url = os.getenv('url_list_for_rent') querystring = {\"city\": city,", "city, \"state_code\": state, \"limit\": limit, \"offset\": \"0\", \"sort\":\"relevance\", \"prop_type\": prop_type}", "'lon': lon, 'city':city, 'state':state, 'photos': photos, 'walk_score': walk_score} rental_list.append(element) return", "url, params = querystring, headers = headers,) return response_for_rent.json()['properties'] @router.get('/for_rent_list/{property_id}')", "headers=headers, 
params=querystring) return response_prop_detail.json()['properties'] @router.get('/for_sale_list') async def for_sale_list(api_key = config.settings.api_key,", "* load_dotenv() router = APIRouter() headers = {'x-rapidapi-key': os.getenv('api_key'), 'x-rapidapi-host':", "\"state_code\": state, \"limit\": limit, \"offset\": \"0\", \"sort\":\"relevance\", \"prop_type\": prop_type} response_for_rent", "of results to populate Returns: information about properties for rent", "url = os.getenv('url_list_for_rent') querystring = {\"city\": city, \"state_code\": state, \"limit\":", "querystring, headers = headers,) return response_for_rent.json()['properties'] @router.get('/for_rent_list/{property_id}') async def property_detail(property_id:", "= os.getenv('url_list_for_sale') querystring = {\"city\": city ,\"limit\": limit,\"offset\":\"0\",\"state_code\": state,\"sort\":\"relevance\"} response_for_sale", "querystring = {\"property_id\":property_id} response_prop_detail = requests.request(\"GET\", url, headers=headers, params=querystring) return", "response_prop_detail.json()['properties'] @router.get('/for_sale_list') async def for_sale_list(api_key = config.settings.api_key, city = \"New", "{'address': address, 'lat': lat, 'lon': lon, 'city':city, 'state':state, 'photos': photos,", "response[i]['address']['lat'] lon = response[i]['address']['lon'] photos = response[i]['photos'] address = line", "= response_for_rent.json()['properties'] rental_list = [] for i in range(limit): line", "pydantic import BaseModel, SecretStr from app import config from app.walk_score", "str = \"New York City\", state: str= \"NY\", prop_type: str", "= requests.request(\"GET\", url, params = querystring, headers = headers,) response", "BaseModel, SecretStr from app import config from app.walk_score import *", "from dotenv import load_dotenv from fastapi import APIRouter, Depends import", "load_dotenv() router = APIRouter() headers = {'x-rapidapi-key': os.getenv('api_key'), 'x-rapidapi-host': 
os.getenv('host')", "lon = response[i]['address']['lon'] photos = response[i]['photos'] address = line +\"", "= response[i]['address']['lon'] photos = response[i]['photos'] address = line +\" \"+", "lon, 'city':city, 'state':state, 'photos': photos, 'walk_score': walk_score} rental_list.append(element) return rental_list", "{\"city\": city, \"state_code\": state, \"limit\": limit, \"offset\": \"0\", \"sort\":\"relevance\", \"prop_type\":", "int number of results to populate Returns: information about properties", "params = querystring, headers = headers,) response = response_for_rent.json()['properties'] rental_list", "app.walk_score import * load_dotenv() router = APIRouter() headers = {'x-rapidapi-key':", "i in range(limit): line = response[i]['address']['line'] city = response[i]['address']['city'] state", "photos = response[i]['photos'] address = line +\" \"+ city +", "\"limit\": limit, \"offset\": \"0\", \"sort\":\"relevance\", \"prop_type\": prop_type} response_for_rent = requests.request(\"GET\",", "import APIRouter, Depends import sqlalchemy from pydantic import BaseModel, SecretStr", "line = response[i]['address']['line'] city = response[i]['address']['city'] state = response[i]['address']['state'] lat", "return response_prop_detail.json()['properties'] @router.get('/for_sale_list') async def for_sale_list(api_key = config.settings.api_key, city =", "('condo', 'single_family', 'multi_family') limit: int number of results to populate", "import config from app.walk_score import * load_dotenv() router = APIRouter()", "\"0\", \"sort\":\"relevance\", \"prop_type\": prop_type} response_for_rent = requests.request(\"GET\", url, params =", "state, \"limit\": limit, \"offset\": \"0\", \"sort\":\"relevance\", \"prop_type\": prop_type} response_for_rent =", "property_id Returns: detailed information about the property \"\"\" url =", "properties for rent \"\"\" url = os.getenv('url_list_for_rent') querystring = {\"city\":", "async def property_detail(property_id: str 
= \"O3599084026\"): \"\"\" Parameters: property_id Returns:", "= os.getenv('url_property_detail') querystring = {\"property_id\":property_id} response_prop_detail = requests.request(\"GET\", url, headers=headers,", "'x-rapidapi-host': os.getenv('host') } @router.get('/streamlined_rent_list') async def streamlined_rent_list(api_key = config.settings.api_key, city:", "populate Returns: information about properties for rent \"\"\" url =", "address, 'lat': lat, 'lon': lon, 'city':city, 'state':state, 'photos': photos, 'walk_score':", "lat, 'lon': lon, 'city':city, 'state':state, 'photos': photos, 'walk_score': walk_score} rental_list.append(element)", "city = response[i]['address']['city'] state = response[i]['address']['state'] lat = response[i]['address']['lat'] lon", "app import config from app.walk_score import * load_dotenv() router =", "prop_type: str ('condo', 'single_family', 'multi_family') limit: int number of results", "os.getenv('url_list_for_rent') querystring = {\"city\": city, \"state_code\": state, \"limit\": limit, \"offset\":", "os.getenv('url_property_detail') querystring = {\"property_id\":property_id} response_prop_detail = requests.request(\"GET\", url, headers=headers, params=querystring)", "prop_type} response_for_rent = requests.request(\"GET\", url, params = querystring, headers =", "the property \"\"\" url = os.getenv('url_property_detail') querystring = {\"property_id\":property_id} response_prop_detail", "APIRouter() headers = {'x-rapidapi-key': os.getenv('api_key'), 'x-rapidapi-host': os.getenv('host') } @router.get('/streamlined_rent_list') async", "str= \"NY\", prop_type: str = \"condo\", limit: int = 4):", "querystring, headers = headers,) response = response_for_rent.json()['properties'] rental_list = []", "'photos': photos, 'walk_score': walk_score} rental_list.append(element) return rental_list @router.get('/for_rent_list') async def", "to populate Returns: information about properties for rent \"\"\" url", "[] for i in range(limit): 
line = response[i]['address']['line'] city =", "def streamlined_rent_list(api_key = config.settings.api_key, city: str = \"New York City\",", "\"+ state walk_score = just_walk_score(address, lat, lon) element = {'address':", "response = response_for_rent.json()['properties'] rental_list = [] for i in range(limit):", "api_key city: str state: str prop_type: str ('condo', 'single_family', 'multi_family')", "str = \"condo\", limit: int = 4): \"\"\" Parameters: api_key", "walk_score = just_walk_score(address, lat, lon) element = {'address': address, 'lat':", "Returns: detailed information about the property \"\"\" url = os.getenv('url_property_detail')", "in range(limit): line = response[i]['address']['line'] city = response[i]['address']['city'] state =", "= requests.request(\"GET\", url, params = querystring, headers = headers,) return", "response_prop_detail = requests.request(\"GET\", url, headers=headers, params=querystring) return response_prop_detail.json()['properties'] @router.get('/for_sale_list') async", "async def for_rent_list(api_key = config.settings.api_key, city: str = \"New York", "City\", state: str= \"NY\", prop_type: str = \"condo\", limit: int", "results to populate Returns: information about properties for rent \"\"\"", "range(limit): line = response[i]['address']['line'] city = response[i]['address']['city'] state = response[i]['address']['state']", "dotenv import load_dotenv from fastapi import APIRouter, Depends import sqlalchemy", "config from app.walk_score import * load_dotenv() router = APIRouter() headers", "lat, lon) element = {'address': address, 'lat': lat, 'lon': lon,", "number of results to populate Returns: information about properties for", "response[i]['address']['state'] lat = response[i]['address']['lat'] lon = response[i]['address']['lon'] photos = response[i]['photos']", "{\"property_id\":property_id} response_prop_detail = requests.request(\"GET\", url, headers=headers, params=querystring) return 
response_prop_detail.json()['properties'] @router.get('/for_sale_list')", "str = \"O3599084026\"): \"\"\" Parameters: property_id Returns: detailed information about", "load_dotenv from fastapi import APIRouter, Depends import sqlalchemy from pydantic", "Parameters: api_key city: str state: str prop_type: str ('condo', 'single_family',", "@router.get('/for_sale_list') async def for_sale_list(api_key = config.settings.api_key, city = \"New York", "str prop_type: str ('condo', 'single_family', 'multi_family') limit: int number of", "rental_list @router.get('/for_rent_list') async def for_rent_list(api_key = config.settings.api_key, city: str =", "import os import requests from dotenv import load_dotenv from fastapi", "config.settings.api_key, city: str = \"New York City\", state: str= \"NY\",", "= 4): \"\"\" Parameters: api_key city: str state: str prop_type:", "Returns: information about properties for rent \"\"\" url = os.getenv('url_list_for_rent')", "\"New York City\", state= \"NY\", limit = 4): url =", "from pydantic import BaseModel, SecretStr from app import config from", "querystring = {\"city\": city, \"state_code\": state, \"limit\": limit, \"offset\": \"0\",", "\"sort\":\"relevance\", \"prop_type\": prop_type} response_for_rent = requests.request(\"GET\", url, params = querystring,", "async def streamlined_rent_list(api_key = config.settings.api_key, city: str = \"New York", "requests.request(\"GET\", url, params = querystring, headers = headers,) response =", "headers,) return response_for_rent.json()['properties'] @router.get('/for_rent_list/{property_id}') async def property_detail(property_id: str = \"O3599084026\"):", "about the property \"\"\" url = os.getenv('url_property_detail') querystring = {\"property_id\":property_id}", "limit: int = 4): \"\"\" Parameters: api_key city: str state:", "config.settings.api_key, city = \"New York City\", state= \"NY\", limit =", "from app.walk_score import * load_dotenv() router = APIRouter() headers =", "= 
{\"city\": city ,\"limit\": limit,\"offset\":\"0\",\"state_code\": state,\"sort\":\"relevance\"} response_for_sale = requests.request(\"GET\", url,", "\"offset\": \"0\", \"sort\":\"relevance\", \"prop_type\": prop_type} response_for_rent = requests.request(\"GET\", url, params", "= config.settings.api_key, city: str = \"New York City\", state: str=", "city ,\"limit\": limit,\"offset\":\"0\",\"state_code\": state,\"sort\":\"relevance\"} response_for_sale = requests.request(\"GET\", url, headers=headers, params=querystring)", "= headers,) return response_for_rent.json()['properties'] @router.get('/for_rent_list/{property_id}') async def property_detail(property_id: str =", "= {\"property_id\":property_id} response_prop_detail = requests.request(\"GET\", url, headers=headers, params=querystring) return response_prop_detail.json()['properties']", "requests from dotenv import load_dotenv from fastapi import APIRouter, Depends", "os.getenv('api_key'), 'x-rapidapi-host': os.getenv('host') } @router.get('/streamlined_rent_list') async def streamlined_rent_list(api_key = config.settings.api_key,", "str state: str prop_type: str ('condo', 'single_family', 'multi_family') limit: int", "York City\", state: str= \"NY\", prop_type: str = \"condo\", limit:", "headers,) response = response_for_rent.json()['properties'] rental_list = [] for i in", "headers = headers,) return response_for_rent.json()['properties'] @router.get('/for_rent_list/{property_id}') async def property_detail(property_id: str", "APIRouter, Depends import sqlalchemy from pydantic import BaseModel, SecretStr from", "= line +\" \"+ city + \" \"+ state walk_score", "params=querystring) return response_prop_detail.json()['properties'] @router.get('/for_sale_list') async def for_sale_list(api_key = config.settings.api_key, city", "sqlalchemy from pydantic import BaseModel, SecretStr from app import config", "\"\"\" url = os.getenv('url_list_for_rent') querystring = {\"city\": city, \"state_code\": state,", "= 
APIRouter() headers = {'x-rapidapi-key': os.getenv('api_key'), 'x-rapidapi-host': os.getenv('host') } @router.get('/streamlined_rent_list')", "int = 4): \"\"\" Parameters: api_key city: str state: str", "= {'x-rapidapi-key': os.getenv('api_key'), 'x-rapidapi-host': os.getenv('host') } @router.get('/streamlined_rent_list') async def streamlined_rent_list(api_key", "property \"\"\" url = os.getenv('url_property_detail') querystring = {\"property_id\":property_id} response_prop_detail =", "just_walk_score(address, lat, lon) element = {'address': address, 'lat': lat, 'lon':", "limit = 4): url = os.getenv('url_list_for_sale') querystring = {\"city\": city", "os import requests from dotenv import load_dotenv from fastapi import", "about properties for rent \"\"\" url = os.getenv('url_list_for_rent') querystring =", "import requests from dotenv import load_dotenv from fastapi import APIRouter,", "import load_dotenv from fastapi import APIRouter, Depends import sqlalchemy from", "@router.get('/for_rent_list/{property_id}') async def property_detail(property_id: str = \"O3599084026\"): \"\"\" Parameters: property_id", "element = {'address': address, 'lat': lat, 'lon': lon, 'city':city, 'state':state,", "= 4): url = os.getenv('url_list_for_sale') querystring = {\"city\": city ,\"limit\":", "os.getenv('host') } @router.get('/streamlined_rent_list') async def streamlined_rent_list(api_key = config.settings.api_key, city: str", "headers = headers,) response = response_for_rent.json()['properties'] rental_list = [] for", "response[i]['address']['city'] state = response[i]['address']['state'] lat = response[i]['address']['lat'] lon = response[i]['address']['lon']", "City\", state= \"NY\", limit = 4): url = os.getenv('url_list_for_sale') querystring", "\"NY\", prop_type: str = \"condo\", limit: int = 4): \"\"\"" ]
[ "+ 600) print('timespan=', timespan) with weewx.manager.Manager.open(archive_sqlite) as db_manager: interpolate_dict =", "weeutil.weeutil.TimeSpan(1573245000, 1573246800) timespan = weeutil.weeutil.TimeSpan(1573245000, 1573245000 + 600) print('timespan=', timespan)", "weewx.xtypes archive_sqlite = {'database_name': '/home/weewx/archive/weepwr.sdb', 'driver': 'weedb.sqlite'} archive_mysql = {'database_name':", "SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime , b.dateTime as EndTime", "b WHERE b.dateTime = (Select MAX(c.dateTime) FROM archive c WHERE", "'aggregate_type': 'diff', 'obs_type': 'ch8_a_energy2', 'table_name': db_manager.table_name, 'start': timespan.start, 'stop': timespan.stop,", "MIN(dateTime) FROM archive WHERE dateTime >= %(start)s);\" sql_stmt = SQL_TEMPLATE", "= (Select MAX(c.dateTime) FROM archive c WHERE c.dateTime<=(1573245000+600)) AND a.dateTime", "\"\"\"Select a.dateTime as StartTime , b.dateTime as EndTime , b.dateTime-a.dateTime", "archive WHERE dateTime=%(start)s)) / (%(stop)s - %(start)s) FROM archive WHERE", "\"SELECT %s(%s), MIN(usUnits), MAX(usUnits) FROM %s \" \\ \"WHERE dateTime", "FROM archive a, archive b WHERE b.dateTime = (Select MAX(c.dateTime)", "as StartTime , b.dateTime as EndTime , b.dateTime-a.dateTime as TimeChange", "'<PASSWORD>', 'driver': 'weedb.mysql'} sql_str = \"SELECT %s(%s), MIN(usUnits), MAX(usUnits) FROM", "\"SELECT (b.%(obs_type)s - a.%(obs_type)s) / (b.dateTime-a.dateTime) \"\\ \"FROM archive a,", "% ('avg', 'outTemp', 'archive') timespan = weeutil.weeutil.TimeSpan(1573245000, 1573246800) timespan =", "/ (b.dateTime-a.dateTime) \"\\ \"FROM archive a, archive b \"\\ \"WHERE", "b.dateTime = (SELECT MAX(dateTime) FROM archive WHERE dateTime <= %(stop)s)", "of records with db_manager.connection.cursor() as cursor: for row in cursor.execute(sql_stmt):", "'start': timespan.start, 'stop': timespan.stop, } SQL_TEMPLATE = \"SELECT (ch8_a_energy2 -", "weeutil.weeutil.TimeSpan(1573245000, 1573245000 + 600) 
print('timespan=', timespan) with weewx.manager.Manager.open(archive_sqlite) as db_manager:", "FROM archive WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime,", "as StartTime, b.datetime as EndTime, b.dateTime-a.dateTime as TimeChange, b.ch8_a_energy2-a.ch8_a_energy2 as", "c WHERE c.dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime) FROM archive", "(b.%(obs_type)s - a.%(obs_type)s) / (b.dateTime-a.dateTime) \"\\ \"FROM archive a, archive", "archive a, archive b \"\\ \"WHERE b.dateTime = (SELECT MAX(dateTime)", "FROM archive WHERE dateTime=%(stop)s;\" SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime", "(SELECT MAX(dateTime) FROM archive WHERE dateTime <= %(stop)s) \"\\ \"AND", "archive b WHERE b.dateTime = (Select MAX(dateTime) FROM archive WHERE", "= weeutil.weeutil.TimeSpan(1573245000, 1573245000 + 600) print('timespan=', timespan) with weewx.manager.Manager.open(archive_sqlite) as", "'outTemp', 'archive') timespan = weeutil.weeutil.TimeSpan(1573245000, 1573246800) timespan = weeutil.weeutil.TimeSpan(1573245000, 1573245000", "- %(start)s) FROM archive WHERE dateTime=%(stop)s;\" SQL_TEMPLATE = \"\"\"Select a.dateTime", "FROM %s \" \\ \"WHERE dateTime > ? 
AND dateTime", "= (SELECT MIN(dateTime) FROM archive WHERE dateTime >= %(start)s);\" sql_stmt", "%s(%s), MIN(usUnits), MAX(usUnits) FROM %s \" \\ \"WHERE dateTime >", "(%(stop)s - %(start)s) FROM archive WHERE dateTime=%(stop)s;\" SQL_TEMPLATE = \"\"\"Select", "(Select MAX(c.dateTime) FROM archive c WHERE c.dateTime<=(1573245000+600)) AND a.dateTime =", "timespan.stop, } SQL_TEMPLATE = \"SELECT (ch8_a_energy2 - (SELECT ch8_a_energy2 FROM", "'ch8_a_energy2', 'table_name': db_manager.table_name, 'start': timespan.start, 'stop': timespan.stop, } SQL_TEMPLATE =", "b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM archive a, archive b WHERE b.dateTime", "timespan) with weewx.manager.Manager.open(archive_sqlite) as db_manager: interpolate_dict = { 'aggregate_type': 'diff',", "archive b WHERE b.dateTime = (Select MAX(c.dateTime) FROM archive c", ", b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM archive a Inner Join archive", "b.datetime as EndTime, b.dateTime-a.dateTime as TimeChange, b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM", "archive a, archive b WHERE b.dateTime = (Select MAX(dateTime) FROM", "Get the number of records with db_manager.connection.cursor() as cursor: for", "FROM archive WHERE dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime) FROM", "archive c WHERE c.dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime) FROM", "WHERE b.dateTime = (Select MAX(dateTime) FROM archive WHERE dateTime<=(1573245000+600)) AND", "'weedb.sqlite'} archive_mysql = {'database_name': 'weewx', 'user': 'weewx', 'password': '<PASSWORD>', 'driver':", "- (SELECT ch8_a_energy2 FROM archive WHERE dateTime=%(start)s)) / (%(stop)s -", "a.dateTime as StartTime, b.datetime as EndTime, b.dateTime-a.dateTime as TimeChange, b.ch8_a_energy2-a.ch8_a_energy2", "'driver': 'weedb.sqlite'} archive_mysql = {'database_name': 'weewx', 'user': 'weewx', 'password': '<PASSWORD>',", "dateTime > ? 
AND dateTime <= ?\" % ('avg', 'outTemp',", "time import weeutil.weeutil import weewx.manager import weewx.xtypes archive_sqlite = {'database_name':", "'user': 'weewx', 'password': '<PASSWORD>', 'driver': 'weedb.mysql'} sql_str = \"SELECT %s(%s),", "dateTime <= ?\" % ('avg', 'outTemp', 'archive') timespan = weeutil.weeutil.TimeSpan(1573245000,", "b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM archive a Inner Join archive b", "\"\\ \"FROM archive a, archive b \"\\ \"WHERE b.dateTime =", "<= ?\" % ('avg', 'outTemp', 'archive') timespan = weeutil.weeutil.TimeSpan(1573245000, 1573246800)", "{ 'aggregate_type': 'diff', 'obs_type': 'ch8_a_energy2', 'table_name': db_manager.table_name, 'start': timespan.start, 'stop':", "db_manager.table_name, 'start': timespan.start, 'stop': timespan.stop, } SQL_TEMPLATE = \"SELECT (ch8_a_energy2", "\"\"\"Select a.dateTime as StartTime, b.datetime as EndTime, b.dateTime-a.dateTime as TimeChange,", "= (Select MAX(dateTime) FROM archive WHERE dateTime<=(1573245000+600)) AND a.dateTime =", "MIN(dateTime) FROM archive WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"SELECT (b.%(obs_type)s -", "= \"SELECT (b.%(obs_type)s - a.%(obs_type)s) / (b.dateTime-a.dateTime) \"\\ \"FROM archive", "archive b \"\\ \"WHERE b.dateTime = (SELECT MAX(dateTime) FROM archive", "weeutil.weeutil import weewx.manager import weewx.xtypes archive_sqlite = {'database_name': '/home/weewx/archive/weepwr.sdb', 'driver':", "'/home/weewx/archive/weepwr.sdb', 'driver': 'weedb.sqlite'} archive_mysql = {'database_name': 'weewx', 'user': 'weewx', 'password':", "as ValueChange FROM archive a, archive b WHERE b.dateTime =", "'table_name': db_manager.table_name, 'start': timespan.start, 'stop': timespan.stop, } SQL_TEMPLATE = \"SELECT", "timespan = weeutil.weeutil.TimeSpan(1573245000, 1573245000 + 600) print('timespan=', timespan) with weewx.manager.Manager.open(archive_sqlite)", "\"FROM archive a, archive b \"\\ \"WHERE b.dateTime = (SELECT", "interpolate_dict = { 
'aggregate_type': 'diff', 'obs_type': 'ch8_a_energy2', 'table_name': db_manager.table_name, 'start':", "\"\\ \"AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE dateTime", "SQL_TEMPLATE = \"SELECT (b.%(obs_type)s - a.%(obs_type)s) / (b.dateTime-a.dateTime) \"\\ \"FROM", "sql_str = \"SELECT %s(%s), MIN(usUnits), MAX(usUnits) FROM %s \" \\", "(SELECT MIN(dateTime) FROM archive WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"\"\"Select a.dateTime", "__future__ import print_function import time import weeutil.weeutil import weewx.manager import", "} SQL_TEMPLATE = \"SELECT (ch8_a_energy2 - (SELECT ch8_a_energy2 FROM archive", "'obs_type': 'ch8_a_energy2', 'table_name': db_manager.table_name, 'start': timespan.start, 'stop': timespan.stop, } SQL_TEMPLATE", "number of records with db_manager.connection.cursor() as cursor: for row in", "AND dateTime <= ?\" % ('avg', 'outTemp', 'archive') timespan =", "archive WHERE dateTime <= %(stop)s) \"\\ \"AND a.dateTime = (SELECT", "# Get the number of records with db_manager.connection.cursor() as cursor:", "\"WHERE dateTime > ? 
AND dateTime <= ?\" % ('avg',", "= SQL_TEMPLATE % interpolate_dict print(sql_stmt) # Get the number of", "import weeutil.weeutil import weewx.manager import weewx.xtypes archive_sqlite = {'database_name': '/home/weewx/archive/weepwr.sdb',", "a, archive b \"\\ \"WHERE b.dateTime = (SELECT MAX(dateTime) FROM", "archive WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime, b.datetime", "records with db_manager.connection.cursor() as cursor: for row in cursor.execute(sql_stmt): print(row)", "as TimeChange , b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM archive a Inner", "WHERE b.dateTime = (Select MAX(c.dateTime) FROM archive c WHERE c.dateTime<=(1573245000+600))", "- a.%(obs_type)s) / (b.dateTime-a.dateTime) \"\\ \"FROM archive a, archive b", "b.dateTime = (Select MAX(dateTime) FROM archive WHERE dateTime<=(1573245000+600)) AND a.dateTime", "FROM archive WHERE dateTime=%(start)s)) / (%(stop)s - %(start)s) FROM archive", "{'database_name': '/home/weewx/archive/weepwr.sdb', 'driver': 'weedb.sqlite'} archive_mysql = {'database_name': 'weewx', 'user': 'weewx',", "WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"SELECT (b.%(obs_type)s - a.%(obs_type)s) / (b.dateTime-a.dateTime)", "+ 600)\"\"\" SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime, b.datetime as", "(SELECT MIN(dateTime) FROM archive WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"SELECT (b.%(obs_type)s", "b.dateTime as EndTime , b.dateTime-a.dateTime as TimeChange , b.ch8_a_energy2-a.ch8_a_energy2 as", "as EndTime, b.dateTime-a.dateTime as TimeChange, b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM archive", "EndTime , b.dateTime-a.dateTime as TimeChange , b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM", "dateTime=%(stop)s;\" SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime , b.dateTime as", "= { 'aggregate_type': 'diff', 'obs_type': 'ch8_a_energy2', 'table_name': db_manager.table_name, 'start': timespan.start,", "timespan = 
weeutil.weeutil.TimeSpan(1573245000, 1573246800) timespan = weeutil.weeutil.TimeSpan(1573245000, 1573245000 + 600)", "ValueChange FROM archive a, archive b WHERE b.dateTime = (Select", "= \"SELECT (ch8_a_energy2 - (SELECT ch8_a_energy2 FROM archive WHERE dateTime=%(start)s))", "TimeChange, b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM archive a, archive b WHERE", "a Inner Join archive b ON b.dateTime>=1573245000 AND b.dateTime<=(1573245000 +", "'driver': 'weedb.mysql'} sql_str = \"SELECT %s(%s), MIN(usUnits), MAX(usUnits) FROM %s", "= (SELECT MIN(dateTime) FROM archive WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"\"\"Select", "'stop': timespan.stop, } SQL_TEMPLATE = \"SELECT (ch8_a_energy2 - (SELECT ch8_a_energy2", "= {'database_name': '/home/weewx/archive/weepwr.sdb', 'driver': 'weedb.sqlite'} archive_mysql = {'database_name': 'weewx', 'user':", "= (SELECT MAX(dateTime) FROM archive WHERE dateTime <= %(stop)s) \"\\", "= \"\"\"Select a.dateTime as StartTime, b.datetime as EndTime, b.dateTime-a.dateTime as", "b.dateTime>=1573245000 AND b.dateTime<=(1573245000 + 600)\"\"\" SQL_TEMPLATE = \"\"\"Select a.dateTime as", "import weewx.xtypes archive_sqlite = {'database_name': '/home/weewx/archive/weepwr.sdb', 'driver': 'weedb.sqlite'} archive_mysql =", "MAX(c.dateTime) FROM archive c WHERE c.dateTime<=(1573245000+600)) AND a.dateTime = (SELECT", "b WHERE b.dateTime = (Select MAX(dateTime) FROM archive WHERE dateTime<=(1573245000+600))", "WHERE dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE", "\"AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE dateTime >=", "archive WHERE dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime) FROM archive", "dateTime >= %(start)s);\" sql_stmt = SQL_TEMPLATE % interpolate_dict print(sql_stmt) #", "b.dateTime<=(1573245000 + 600)\"\"\" SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime, b.datetime", "sql_stmt = SQL_TEMPLATE % interpolate_dict print(sql_stmt) # Get the 
number", "as EndTime , b.dateTime-a.dateTime as TimeChange , b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange", "FROM archive WHERE dateTime >= %(start)s);\" sql_stmt = SQL_TEMPLATE %", "'password': '<PASSWORD>', 'driver': 'weedb.mysql'} sql_str = \"SELECT %s(%s), MIN(usUnits), MAX(usUnits)", "from __future__ import print_function import time import weeutil.weeutil import weewx.manager", "FROM archive a Inner Join archive b ON b.dateTime>=1573245000 AND", "as TimeChange, b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM archive a, archive b", "import print_function import time import weeutil.weeutil import weewx.manager import weewx.xtypes", "dateTime <= %(stop)s) \"\\ \"AND a.dateTime = (SELECT MIN(dateTime) FROM", "MIN(usUnits), MAX(usUnits) FROM %s \" \\ \"WHERE dateTime > ?", "(b.dateTime-a.dateTime) \"\\ \"FROM archive a, archive b \"\\ \"WHERE b.dateTime", "import time import weeutil.weeutil import weewx.manager import weewx.xtypes archive_sqlite =", "= \"SELECT %s(%s), MIN(usUnits), MAX(usUnits) FROM %s \" \\ \"WHERE", "%(start)s);\" sql_stmt = SQL_TEMPLATE % interpolate_dict print(sql_stmt) # Get the", "as db_manager: interpolate_dict = { 'aggregate_type': 'diff', 'obs_type': 'ch8_a_energy2', 'table_name':", "Inner Join archive b ON b.dateTime>=1573245000 AND b.dateTime<=(1573245000 + 600)\"\"\"", "timespan.start, 'stop': timespan.stop, } SQL_TEMPLATE = \"SELECT (ch8_a_energy2 - (SELECT", "archive_mysql = {'database_name': 'weewx', 'user': 'weewx', 'password': '<PASSWORD>', 'driver': 'weedb.mysql'}", "archive WHERE dateTime=%(stop)s;\" SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime ,", "{'database_name': 'weewx', 'user': 'weewx', 'password': '<PASSWORD>', 'driver': 'weedb.mysql'} sql_str =", "as ValueChange FROM archive a Inner Join archive b ON", "% interpolate_dict print(sql_stmt) # Get the number of records with", "weewx.manager.Manager.open(archive_sqlite) as db_manager: interpolate_dict = { 'aggregate_type': 'diff', 'obs_type': 'ch8_a_energy2',", 
"FROM archive c WHERE c.dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime)", "a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE dateTime >= %(start)s);\"", "FROM archive WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"SELECT (b.%(obs_type)s - a.%(obs_type)s)", "= \"\"\"Select a.dateTime as StartTime , b.dateTime as EndTime ,", "WHERE dateTime <= %(stop)s) \"\\ \"AND a.dateTime = (SELECT MIN(dateTime)", "ValueChange FROM archive a Inner Join archive b ON b.dateTime>=1573245000", "= {'database_name': 'weewx', 'user': 'weewx', 'password': '<PASSWORD>', 'driver': 'weedb.mysql'} sql_str", "a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE =", "\\ \"WHERE dateTime > ? AND dateTime <= ?\" %", "interpolate_dict print(sql_stmt) # Get the number of records with db_manager.connection.cursor()", "weewx.manager import weewx.xtypes archive_sqlite = {'database_name': '/home/weewx/archive/weepwr.sdb', 'driver': 'weedb.sqlite'} archive_mysql", "the number of records with db_manager.connection.cursor() as cursor: for row", "with weewx.manager.Manager.open(archive_sqlite) as db_manager: interpolate_dict = { 'aggregate_type': 'diff', 'obs_type':", "%s \" \\ \"WHERE dateTime > ? AND dateTime <=", "WHERE dateTime=%(start)s)) / (%(stop)s - %(start)s) FROM archive WHERE dateTime=%(stop)s;\"", "<= %(stop)s) \"\\ \"AND a.dateTime = (SELECT MIN(dateTime) FROM archive", "%(stop)s) \"\\ \"AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE", "> ? AND dateTime <= ?\" % ('avg', 'outTemp', 'archive')", "StartTime , b.dateTime as EndTime , b.dateTime-a.dateTime as TimeChange ,", "b.dateTime-a.dateTime as TimeChange, b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM archive a, archive", "\" \\ \"WHERE dateTime > ? 
AND dateTime <= ?\"", "(Select MAX(dateTime) FROM archive WHERE dateTime<=(1573245000+600)) AND a.dateTime = (SELECT", "archive WHERE dateTime >= %(start)s);\" sql_stmt = SQL_TEMPLATE % interpolate_dict", "a, archive b WHERE b.dateTime = (Select MAX(c.dateTime) FROM archive", "(ch8_a_energy2 - (SELECT ch8_a_energy2 FROM archive WHERE dateTime=%(start)s)) / (%(stop)s", "MAX(dateTime) FROM archive WHERE dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime)", "(SELECT MIN(dateTime) FROM archive WHERE dateTime >= %(start)s);\" sql_stmt =", "ON b.dateTime>=1573245000 AND b.dateTime<=(1573245000 + 600)\"\"\" SQL_TEMPLATE = \"\"\"Select a.dateTime", "StartTime, b.datetime as EndTime, b.dateTime-a.dateTime as TimeChange, b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange", "MIN(dateTime) FROM archive WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"\"\"Select a.dateTime as", "WHERE dateTime=%(stop)s;\" SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime , b.dateTime", "'weewx', 'password': '<PASSWORD>', 'driver': 'weedb.mysql'} sql_str = \"SELECT %s(%s), MIN(usUnits),", "MAX(usUnits) FROM %s \" \\ \"WHERE dateTime > ? 
AND", "archive WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"SELECT (b.%(obs_type)s - a.%(obs_type)s) /", "\"WHERE b.dateTime = (SELECT MAX(dateTime) FROM archive WHERE dateTime <=", "print(sql_stmt) # Get the number of records with db_manager.connection.cursor() as", "archive_sqlite = {'database_name': '/home/weewx/archive/weepwr.sdb', 'driver': 'weedb.sqlite'} archive_mysql = {'database_name': 'weewx',", "1573246800) timespan = weeutil.weeutil.TimeSpan(1573245000, 1573245000 + 600) print('timespan=', timespan) with", ", b.dateTime as EndTime , b.dateTime-a.dateTime as TimeChange , b.ch8_a_energy2-a.ch8_a_energy2", "Join archive b ON b.dateTime>=1573245000 AND b.dateTime<=(1573245000 + 600)\"\"\" SQL_TEMPLATE", "dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"SELECT (b.%(obs_type)s - a.%(obs_type)s) / (b.dateTime-a.dateTime) \"\\", "b \"\\ \"WHERE b.dateTime = (SELECT MAX(dateTime) FROM archive WHERE", "dateTime=%(start)s)) / (%(stop)s - %(start)s) FROM archive WHERE dateTime=%(stop)s;\" SQL_TEMPLATE", "= (SELECT MIN(dateTime) FROM archive WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"SELECT", "dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime, b.datetime as EndTime,", "'weewx', 'user': 'weewx', 'password': '<PASSWORD>', 'driver': 'weedb.mysql'} sql_str = \"SELECT", "?\" % ('avg', 'outTemp', 'archive') timespan = weeutil.weeutil.TimeSpan(1573245000, 1573246800) timespan", "1573245000 + 600) print('timespan=', timespan) with weewx.manager.Manager.open(archive_sqlite) as db_manager: interpolate_dict", "SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime, b.datetime as EndTime, b.dateTime-a.dateTime", "AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE", "SQL_TEMPLATE = \"SELECT (ch8_a_energy2 - (SELECT ch8_a_energy2 FROM archive WHERE", "a.dateTime as StartTime , b.dateTime as EndTime , b.dateTime-a.dateTime as", "'archive') timespan = weeutil.weeutil.TimeSpan(1573245000, 
1573246800) timespan = weeutil.weeutil.TimeSpan(1573245000, 1573245000 +", "WHERE dateTime>=1573245000);\"\"\" SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime, b.datetime as", "dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE dateTime>=1573245000);\"\"\"", ">= %(start)s);\" sql_stmt = SQL_TEMPLATE % interpolate_dict print(sql_stmt) # Get", "\"SELECT (ch8_a_energy2 - (SELECT ch8_a_energy2 FROM archive WHERE dateTime=%(start)s)) /", "archive a Inner Join archive b ON b.dateTime>=1573245000 AND b.dateTime<=(1573245000", "ch8_a_energy2 FROM archive WHERE dateTime=%(start)s)) / (%(stop)s - %(start)s) FROM", "db_manager: interpolate_dict = { 'aggregate_type': 'diff', 'obs_type': 'ch8_a_energy2', 'table_name': db_manager.table_name,", "archive b ON b.dateTime>=1573245000 AND b.dateTime<=(1573245000 + 600)\"\"\" SQL_TEMPLATE =", "a, archive b WHERE b.dateTime = (Select MAX(dateTime) FROM archive", "a.%(obs_type)s) / (b.dateTime-a.dateTime) \"\\ \"FROM archive a, archive b \"\\", "b.dateTime = (Select MAX(c.dateTime) FROM archive c WHERE c.dateTime<=(1573245000+600)) AND", "import weewx.manager import weewx.xtypes archive_sqlite = {'database_name': '/home/weewx/archive/weepwr.sdb', 'driver': 'weedb.sqlite'}", "600)\"\"\" SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime, b.datetime as EndTime,", "c.dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE dateTime>=1573245000);\"\"\"", "b ON b.dateTime>=1573245000 AND b.dateTime<=(1573245000 + 600)\"\"\" SQL_TEMPLATE = \"\"\"Select", "= weeutil.weeutil.TimeSpan(1573245000, 1573246800) timespan = weeutil.weeutil.TimeSpan(1573245000, 1573245000 + 600) print('timespan=',", "600) print('timespan=', timespan) with weewx.manager.Manager.open(archive_sqlite) as db_manager: interpolate_dict = {", "\"\\ \"WHERE b.dateTime = (SELECT MAX(dateTime) FROM archive WHERE dateTime", "TimeChange , b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM archive a Inner Join", 
"? AND dateTime <= ?\" % ('avg', 'outTemp', 'archive') timespan", "WHERE dateTime >= %(start)s);\" sql_stmt = SQL_TEMPLATE % interpolate_dict print(sql_stmt)", "/ (%(stop)s - %(start)s) FROM archive WHERE dateTime=%(stop)s;\" SQL_TEMPLATE =", "FROM archive WHERE dateTime <= %(stop)s) \"\\ \"AND a.dateTime =", "('avg', 'outTemp', 'archive') timespan = weeutil.weeutil.TimeSpan(1573245000, 1573246800) timespan = weeutil.weeutil.TimeSpan(1573245000,", "(SELECT ch8_a_energy2 FROM archive WHERE dateTime=%(start)s)) / (%(stop)s - %(start)s)", "b.dateTime-a.dateTime as TimeChange , b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM archive a", "WHERE c.dateTime<=(1573245000+600)) AND a.dateTime = (SELECT MIN(dateTime) FROM archive WHERE", "%(start)s) FROM archive WHERE dateTime=%(stop)s;\" SQL_TEMPLATE = \"\"\"Select a.dateTime as", "print_function import time import weeutil.weeutil import weewx.manager import weewx.xtypes archive_sqlite", "print('timespan=', timespan) with weewx.manager.Manager.open(archive_sqlite) as db_manager: interpolate_dict = { 'aggregate_type':", "'weedb.mysql'} sql_str = \"SELECT %s(%s), MIN(usUnits), MAX(usUnits) FROM %s \"", ", b.dateTime-a.dateTime as TimeChange , b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM archive", "EndTime, b.dateTime-a.dateTime as TimeChange, b.ch8_a_energy2-a.ch8_a_energy2 as ValueChange FROM archive a,", "archive a, archive b WHERE b.dateTime = (Select MAX(c.dateTime) FROM", "MAX(dateTime) FROM archive WHERE dateTime <= %(stop)s) \"\\ \"AND a.dateTime", "AND b.dateTime<=(1573245000 + 600)\"\"\" SQL_TEMPLATE = \"\"\"Select a.dateTime as StartTime,", "'diff', 'obs_type': 'ch8_a_energy2', 'table_name': db_manager.table_name, 'start': timespan.start, 'stop': timespan.stop, }", "FROM archive a, archive b WHERE b.dateTime = (Select MAX(dateTime)", "SQL_TEMPLATE % interpolate_dict print(sql_stmt) # Get the number of records" ]
[ "\"limit_price\": -1, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996950863 }) assert", "most 12 characters', 'type': 'value_error.any_str.max_length' }] } def test_post_orders4(): response", "from fastapi.testclient import TestClient from fast_lemon_api import app client =", "1, \"valid_until\": 1996943663, \"status\": \"open\" } order_id = None def", "0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996950863 }) assert response.status_code", "== { 'detail': [{ 'ctx': { 'limit_value': 0 }, 'loc':", "'ctx': { 'limit_value': 0 }, 'loc': ['body', 'quantity'], 'msg': 'ensure", "'msg': 'value is not a valid integer', 'type': 'type_error.integer' }]", "= client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"BUY!\", \"quantity\":", "characters', 'type': 'value_error.any_str.min_length', 'ctx': { 'limit_value': 12 } }] }", "'ctx': { 'limit_value': 0 }, 'loc': ['body', 'limit_price'], 'msg': 'ensure", "1, \"valid_until\": 1996950863 }) assert response.status_code == 422 assert response.json()", "1.1, \"valid_until\": 1996950863 }) assert response.status_code == 422 assert response.json()", "[{ 'loc': ['body', 'valid_until'], 'msg': 'valid_until cannot be in the", "'valid_until cannot be in the past', 'type': 'value_error' }] }", "[{ 'ctx': { 'limit_value': 0 }, 'loc': ['body', 'limit_price'], 'msg':", "'isin'], 'msg': 'ensure this value has at most 12 characters',", "assert response.json() == { 'detail': [{ 'ctx': { 'enum_values': ['buy',", "\"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996943663,", "json={ \"isin\": \"blablablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\":", "\"blablablabla\", \"limit_price\": -1, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996950863 })", "\"side\": \"SELL\", \"quantity\": 1.1, \"valid_until\": 1996950863 }) assert response.status_code ==", "= j.pop('uuid') assert j == 
neworder #assert 0 def test_post_orders2():", "['body', 'isin'], 'msg': 'ensure this value has at most 12", "\"quantity\": 1, \"valid_until\": 1996943663, }) assert response.status_code == 201 j", "assert j == neworder #assert 0 def test_post_orders2(): response =", "} def test_post_orders4(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\":", "'quantity'], 'msg': 'ensure this value is greater than 0', 'type':", "json={ \"isin\": \"blablablabla\", \"limit_price\": -1, \"side\": \"buy\", \"quantity\": 1, \"valid_until\":", "12 } }] } def test_post_orders3(): response = client.post('/orders/', json={", "0.33333, \"side\": \"SELL\", \"quantity\": 0, \"valid_until\": 1996950863 }) assert response.status_code", "0 }, 'loc': ['body', 'quantity'], 'msg': 'ensure this value is", "\"BUY!\", \"quantity\": 1, \"valid_until\": 1996950863 }) assert response.status_code == 422", "characters', 'type': 'value_error.any_str.max_length' }] } def test_post_orders4(): response = client.post('/orders/',", "\"blablablabla\", \"limit_price\": 0.2, \"side\": \"BUY!\", \"quantity\": 1, \"valid_until\": 1996950863 })", "def test_post_orders1(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2,", "json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\":", "'type': 'type_error.enum' }] } def test_post_orders6(): response = client.post('/orders/', json={", "\"isin\": \"blablablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996950863", "order_id = j.pop('uuid') assert j == neworder #assert 0 def", "'limit_value': 0 }, 'loc': ['body', 'quantity'], 'msg': 'ensure this value", "test_get_root(): response = client.get(\"/\") assert response.status_code == 200 assert response.text", "12 characters', 'type': 'value_error.any_str.min_length', 'ctx': { 'limit_value': 12 } }]", "client.get(\"/\") assert response.status_code == 200 
assert response.text == \"Welcome to", "a valid enumeration member; permitted: 'buy', 'sell'\", 'type': 'type_error.enum' }]", "\"buy\", \"quantity\": 1, \"valid_until\": 1996943663, }) assert response.status_code == 201", "\"buy\", \"quantity\": 1, \"valid_until\": 1996950863 }) assert response.status_code == 422", "} def test_post_orders8(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\":", "response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": -1, \"side\": \"buy\",", "at least 12 characters', 'type': 'value_error.any_str.min_length', 'ctx': { 'limit_value': 12", "'msg': 'ensure this value is greater than 0', 'type': 'value_error.number.not_gt'", "}] } def test_post_orders5(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\",", "0.2, \"side\": \"SELL\", \"quantity\": 1.1, \"valid_until\": 1996950863 }) assert response.status_code", "} def test_post_orders7(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\":", "'limit_price'], 'msg': 'ensure this value is greater than 0', 'type':", "greater than 0', 'type': 'value_error.number.not_gt' }] } def test_post_orders8(): response", "\"limit_price\": 0.2, \"side\": \"SELL\", \"quantity\": 2, \"valid_until\": 1996 }) assert", "0', 'type': 'value_error.number.not_gt' }] } def test_post_orders5(): response = client.post('/orders/',", "None def test_post_orders1(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\":", "0.2, \"side\": \"BUY!\", \"quantity\": 1, \"valid_until\": 1996950863 }) assert response.status_code", "= client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": -1, \"side\": \"buy\", \"quantity\":", "{ 'enum_values': ['buy', 'sell'] }, 'loc': ['body', 'side'], 'msg': \"value", "\"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"SELL\", \"quantity\": 2, \"valid_until\": 1996", "permitted: 'buy', 'sell'\", 'type': 
'type_error.enum' }] } def test_post_orders6(): response", "response.status_code == 201 j = response.json() #print(repr(j)) order_id = j.pop('uuid')", "'ensure this value has at least 12 characters', 'type': 'value_error.any_str.min_length',", "{ 'detail': [{ 'loc': ['body', 'quantity'], 'msg': 'value is not", "'ctx': { 'limit_value': 12 }, 'loc': ['body', 'isin'], 'msg': 'ensure", "has at least 12 characters', 'type': 'value_error.any_str.min_length', 'ctx': { 'limit_value':", "'loc': ['body', 'limit_price'], 'msg': 'ensure this value is greater than", "[{ 'ctx': { 'limit_value': 12 }, 'loc': ['body', 'isin'], 'msg':", "member; permitted: 'buy', 'sell'\", 'type': 'type_error.enum' }] } def test_post_orders6():", "}] } def test_post_orders7(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\",", "valid enumeration member; permitted: 'buy', 'sell'\", 'type': 'type_error.enum' }] }", "'type_error.enum' }] } def test_post_orders6(): response = client.post('/orders/', json={ \"isin\":", "[{ 'ctx': { 'enum_values': ['buy', 'sell'] }, 'loc': ['body', 'side'],", "[{ 'ctx': { 'limit_value': 0 }, 'loc': ['body', 'quantity'], 'msg':", "{ 'detail': [{ 'ctx': { 'enum_values': ['buy', 'sell'] }, 'loc':", "json={ \"isin\": \"blablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\":", "= client.get(\"/\") assert response.status_code == 200 assert response.text == \"Welcome", "'msg': 'valid_until cannot be in the past', 'type': 'value_error' }]", "'detail': [{ 'ctx': { 'limit_value': 0 }, 'loc': ['body', 'limit_price'],", "is greater than 0', 'type': 'value_error.number.not_gt' }] } def test_post_orders8():", "j.pop('uuid') assert j == neworder #assert 0 def test_post_orders2(): response", "0, \"valid_until\": 1996950863 }) assert response.status_code == 422 assert response.json()", "assert response.text == \"Welcome to the fast-lemon-api!\\n\" neworder = {", "than 0', 'type': 'value_error.number.not_gt' }] } def 
test_post_orders5(): response =", "test_post_orders6(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.33333, \"side\":", "\"SELL\", \"quantity\": 2, \"valid_until\": 1996 }) assert response.status_code == 422", "\"isin\": \"blablablabla\", \"limit_price\": -1, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996950863", "== { 'detail': [{ 'ctx': { 'enum_values': ['buy', 'sell'] },", "['body', 'limit_price'], 'msg': 'ensure this value is greater than 0',", "== 422 assert response.json() == { 'detail': [{ 'loc': ['body',", "'msg': 'ensure this value has at least 12 characters', 'type':", "has at most 12 characters', 'type': 'value_error.any_str.max_length' }] } def", "== { 'detail': [{ 'ctx': { 'limit_value': 12 }, 'loc':", "at most 12 characters', 'type': 'value_error.any_str.max_length' }] } def test_post_orders4():", "\"valid_until\": 1996950863 }) assert response.status_code == 422 assert response.json() ==", "enumeration member; permitted: 'buy', 'sell'\", 'type': 'type_error.enum' }] } def", "= client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"SELL\", \"quantity\":", "fastapi.testclient import TestClient from fast_lemon_api import app client = TestClient(app)", "def test_get_root(): response = client.get(\"/\") assert response.status_code == 200 assert", "0.2, \"side\": \"SELL\", \"quantity\": 2, \"valid_until\": 1996 }) assert response.status_code", "'loc': ['body', 'valid_until'], 'msg': 'valid_until cannot be in the past',", "test_post_orders8(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\":", "\"blablablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996950863 })", "\"side\": \"BUY!\", \"quantity\": 1, \"valid_until\": 1996950863 }) assert response.status_code ==", "\"open\" } order_id = None def test_post_orders1(): response = client.post('/orders/',", "'sell'] }, 'loc': 
['body', 'side'], 'msg': \"value is not a", "valid integer', 'type': 'type_error.integer' }] } def test_post_orders7(): response =", "json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"SELL\", \"quantity\": 2, \"valid_until\":", "== \"Welcome to the fast-lemon-api!\\n\" neworder = { \"isin\": \"blablablabla\",", "\"blablablabla\", \"limit_price\": 0.2, \"side\": \"SELL\", \"quantity\": 2, \"valid_until\": 1996 })", "\"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996943663, \"status\": \"open\" } order_id", "\"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996943663, }) assert response.status_code ==", "'valid_until'], 'msg': 'valid_until cannot be in the past', 'type': 'value_error'", "'buy', 'sell'\", 'type': 'type_error.enum' }] } def test_post_orders6(): response =", "assert response.json() == { 'detail': [{ 'loc': ['body', 'valid_until'], 'msg':", "def test_post_orders8(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2,", "client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.33333, \"side\": \"SELL\", \"quantity\": 0,", "'ensure this value is greater than 0', 'type': 'value_error.number.not_gt' }]", "'value_error.any_str.max_length' }] } def test_post_orders4(): response = client.post('/orders/', json={ \"isin\":", "1996943663, }) assert response.status_code == 201 j = response.json() #print(repr(j))", "test_post_orders3(): response = client.post('/orders/', json={ \"isin\": \"blablablablabla\", \"limit_price\": 0.2, \"side\":", "= client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\":", "response.json() == { 'detail': [{ 'ctx': { 'limit_value': 12 },", "def test_post_orders3(): response = client.post('/orders/', json={ \"isin\": \"blablablablabla\", \"limit_price\": 0.2,", "TestClient(app) def test_get_root(): response = client.get(\"/\") assert response.status_code == 200", "client.post('/orders/', 
json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1,", "test_post_orders2(): response = client.post('/orders/', json={ \"isin\": \"blablabla\", \"limit_price\": 0.2, \"side\":", "{ 'limit_value': 12 } }] } def test_post_orders3(): response =", "= client.post('/orders/', json={ \"isin\": \"blablablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\":", "\"limit_price\": 0.33333, \"side\": \"SELL\", \"quantity\": 0, \"valid_until\": 1996950863 }) assert", "'type': 'value_error.number.not_gt' }] } def test_post_orders5(): response = client.post('/orders/', json={", "response.status_code == 200 assert response.text == \"Welcome to the fast-lemon-api!\\n\"", "test_post_orders1(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\":", "TestClient from fast_lemon_api import app client = TestClient(app) def test_get_root():", "response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"BUY!\",", "['buy', 'sell'] }, 'loc': ['body', 'side'], 'msg': \"value is not", "json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"BUY!\", \"quantity\": 1, \"valid_until\":", "'sell'\", 'type': 'type_error.enum' }] } def test_post_orders6(): response = client.post('/orders/',", "} }] } def test_post_orders3(): response = client.post('/orders/', json={ \"isin\":", "\"limit_price\": 0.2, \"side\": \"SELL\", \"quantity\": 1.1, \"valid_until\": 1996950863 }) assert", "app client = TestClient(app) def test_get_root(): response = client.get(\"/\") assert", "} def test_post_orders3(): response = client.post('/orders/', json={ \"isin\": \"blablablablabla\", \"limit_price\":", "422 assert response.json() == { 'detail': [{ 'ctx': { 'limit_value':", "value is greater than 0', 'type': 'value_error.number.not_gt' }] } def", "assert response.status_code == 200 assert response.text == \"Welcome to the", "def test_post_orders7(): response = 
client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2,", "'value_error.number.not_gt' }] } def test_post_orders8(): response = client.post('/orders/', json={ \"isin\":", "response.json() == { 'detail': [{ 'loc': ['body', 'valid_until'], 'msg': 'valid_until", "\"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996950863 }) assert", "= TestClient(app) def test_get_root(): response = client.get(\"/\") assert response.status_code ==", "\"limit_price\": 0.2, \"side\": \"BUY!\", \"quantity\": 1, \"valid_until\": 1996950863 }) assert", "12 }, 'loc': ['body', 'isin'], 'msg': 'ensure this value has", "\"blablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996943663, })", "client.post('/orders/', json={ \"isin\": \"blablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1,", "== { 'detail': [{ 'loc': ['body', 'valid_until'], 'msg': 'valid_until cannot", "client = TestClient(app) def test_get_root(): response = client.get(\"/\") assert response.status_code", "<gh_stars>0 #!/usr/bin/env pytest-3 from fastapi.testclient import TestClient from fast_lemon_api import", "not a valid enumeration member; permitted: 'buy', 'sell'\", 'type': 'type_error.enum'", "response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"SELL\",", "json={ \"isin\": \"blablablabla\", \"limit_price\": 0.33333, \"side\": \"SELL\", \"quantity\": 0, \"valid_until\":", "client.post('/orders/', json={ \"isin\": \"blablablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1,", "test_post_orders5(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\":", "\"valid_until\": 1996943663, }) assert response.status_code == 201 j = response.json()", "#!/usr/bin/env pytest-3 from fastapi.testclient import TestClient from fast_lemon_api import app", "['body', 'isin'], 'msg': 'ensure this value has at least 12", 
"\"quantity\": 1, \"valid_until\": 1996943663, \"status\": \"open\" } order_id = None", "\"value is not a valid enumeration member; permitted: 'buy', 'sell'\",", "}) assert response.status_code == 201 j = response.json() #print(repr(j)) order_id", "{ 'detail': [{ 'loc': ['body', 'valid_until'], 'msg': 'valid_until cannot be", "'loc': ['body', 'quantity'], 'msg': 'ensure this value is greater than", "to the fast-lemon-api!\\n\" neworder = { \"isin\": \"blablablabla\", \"limit_price\": 0.2,", "value has at most 12 characters', 'type': 'value_error.any_str.max_length' }] }", "this value has at least 12 characters', 'type': 'value_error.any_str.min_length', 'ctx':", "fast_lemon_api import app client = TestClient(app) def test_get_root(): response =", "response.status_code == 422 assert response.json() == { 'detail': [{ 'ctx':", "422 assert response.json() == { 'detail': [{ 'loc': ['body', 'isin'],", "assert response.json() == { 'detail': [{ 'loc': ['body', 'quantity'], 'msg':", "}, 'loc': ['body', 'quantity'], 'msg': 'ensure this value is greater", "json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"SELL\", \"quantity\": 1.1, \"valid_until\":", "\"SELL\", \"quantity\": 0, \"valid_until\": 1996950863 }) assert response.status_code == 422", "12 characters', 'type': 'value_error.any_str.max_length' }] } def test_post_orders4(): response =", "\"blablablabla\", \"limit_price\": 0.33333, \"side\": \"SELL\", \"quantity\": 0, \"valid_until\": 1996950863 })", "}, 'loc': ['body', 'side'], 'msg': \"value is not a valid", "'loc': ['body', 'quantity'], 'msg': 'value is not a valid integer',", "neworder = { \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\":", "'type_error.integer' }] } def test_post_orders7(): response = client.post('/orders/', json={ \"isin\":", "\"isin\": \"blablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996950863", "neworder #assert 0 def test_post_orders2(): response = 
client.post('/orders/', json={ \"isin\":", "{ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\":", "#print(repr(j)) order_id = j.pop('uuid') assert j == neworder #assert 0", "1, \"valid_until\": 1996943663, }) assert response.status_code == 201 j =", "['body', 'quantity'], 'msg': 'value is not a valid integer', 'type':", "'type': 'value_error.number.not_gt' }] } def test_post_orders8(): response = client.post('/orders/', json={", "1996943663, \"status\": \"open\" } order_id = None def test_post_orders1(): response", "\"SELL\", \"quantity\": 1.1, \"valid_until\": 1996950863 }) assert response.status_code == 422", "1996 }) assert response.status_code == 422 assert response.json() == {", "is greater than 0', 'type': 'value_error.number.not_gt' }] } def test_post_orders5():", "\"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996950863 }) assert response.status_code ==", "client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"BUY!\", \"quantity\": 1,", "assert response.status_code == 422 assert response.json() == { 'detail': [{", "'type': 'type_error.integer' }] } def test_post_orders7(): response = client.post('/orders/', json={", "response = client.post('/orders/', json={ \"isin\": \"blablabla\", \"limit_price\": 0.2, \"side\": \"buy\",", "}] } def test_post_orders4(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\",", "response.status_code == 422 assert response.json() == { 'detail': [{ 'loc':", "client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": -1, \"side\": \"buy\", \"quantity\": 1,", "fast-lemon-api!\\n\" neworder = { \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"buy\",", "= client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.33333, \"side\": \"SELL\", \"quantity\":", "this value has at most 12 characters', 'type': 'value_error.any_str.max_length' }]", "j == neworder #assert 0 def 
test_post_orders2(): response = client.post('/orders/',", "assert response.json() == { 'detail': [{ 'loc': ['body', 'isin'], 'msg':", "} def test_post_orders5(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\":", "greater than 0', 'type': 'value_error.number.not_gt' }] } def test_post_orders5(): response", "'limit_value': 0 }, 'loc': ['body', 'limit_price'], 'msg': 'ensure this value", "the fast-lemon-api!\\n\" neworder = { \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\":", "'detail': [{ 'loc': ['body', 'quantity'], 'msg': 'value is not a", "not a valid integer', 'type': 'type_error.integer' }] } def test_post_orders7():", "1996950863 }) assert response.status_code == 422 assert response.json() == {", "'type': 'value_error.any_str.min_length', 'ctx': { 'limit_value': 12 } }] } def", "'detail': [{ 'ctx': { 'enum_values': ['buy', 'sell'] }, 'loc': ['body',", "assert response.status_code == 201 j = response.json() #print(repr(j)) order_id =", "= response.json() #print(repr(j)) order_id = j.pop('uuid') assert j == neworder", "== 200 assert response.text == \"Welcome to the fast-lemon-api!\\n\" neworder", "== 201 j = response.json() #print(repr(j)) order_id = j.pop('uuid') assert", "test_post_orders7(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\":", "'msg': 'ensure this value has at most 12 characters', 'type':", "0 def test_post_orders2(): response = client.post('/orders/', json={ \"isin\": \"blablabla\", \"limit_price\":", "'ensure this value has at most 12 characters', 'type': 'value_error.any_str.max_length'", "response = client.post('/orders/', json={ \"isin\": \"blablablablabla\", \"limit_price\": 0.2, \"side\": \"buy\",", "\"blablablabla\", \"limit_price\": 0.2, \"side\": \"SELL\", \"quantity\": 1.1, \"valid_until\": 1996950863 })", "\"valid_until\": 1996 }) assert response.status_code == 422 assert response.json() ==", "#assert 0 def test_post_orders2(): 
response = client.post('/orders/', json={ \"isin\": \"blablabla\",", "\"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"BUY!\", \"quantity\": 1, \"valid_until\": 1996950863", "response.json() == { 'detail': [{ 'loc': ['body', 'isin'], 'msg': 'ensure", "} order_id = None def test_post_orders1(): response = client.post('/orders/', json={", "than 0', 'type': 'value_error.number.not_gt' }] } def test_post_orders8(): response =", "['body', 'valid_until'], 'msg': 'valid_until cannot be in the past', 'type':", "= None def test_post_orders1(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\",", "is not a valid integer', 'type': 'type_error.integer' }] } def", "a valid integer', 'type': 'type_error.integer' }] } def test_post_orders7(): response", "'isin'], 'msg': 'ensure this value has at least 12 characters',", "'detail': [{ 'loc': ['body', 'valid_until'], 'msg': 'valid_until cannot be in", "= client.post('/orders/', json={ \"isin\": \"blablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\":", "'loc': ['body', 'isin'], 'msg': 'ensure this value has at least", "}] } def test_post_orders8(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\",", "assert response.json() == { 'detail': [{ 'ctx': { 'limit_value': 12", "== neworder #assert 0 def test_post_orders2(): response = client.post('/orders/', json={", "\"quantity\": 1.1, \"valid_until\": 1996950863 }) assert response.status_code == 422 assert", "{ 'detail': [{ 'ctx': { 'limit_value': 0 }, 'loc': ['body',", "pytest-3 from fastapi.testclient import TestClient from fast_lemon_api import app client", "\"status\": \"open\" } order_id = None def test_post_orders1(): response =", "['body', 'quantity'], 'msg': 'ensure this value is greater than 0',", "201 j = response.json() #print(repr(j)) order_id = j.pop('uuid') assert j", "['body', 'side'], 'msg': \"value is not a valid enumeration member;", "\"quantity\": 2, \"valid_until\": 1996 }) assert response.status_code == 
422 assert", "assert response.json() == { 'detail': [{ 'ctx': { 'limit_value': 0", "order_id = None def test_post_orders1(): response = client.post('/orders/', json={ \"isin\":", "response.json() == { 'detail': [{ 'ctx': { 'enum_values': ['buy', 'sell']", "import TestClient from fast_lemon_api import app client = TestClient(app) def", "\"Welcome to the fast-lemon-api!\\n\" neworder = { \"isin\": \"blablablabla\", \"limit_price\":", "'ctx': { 'limit_value': 12 } }] } def test_post_orders3(): response", "'value is not a valid integer', 'type': 'type_error.integer' }] }", "{ 'limit_value': 0 }, 'loc': ['body', 'quantity'], 'msg': 'ensure this", "'type': 'value_error.any_str.max_length' }] } def test_post_orders4(): response = client.post('/orders/', json={", "\"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996943663, \"status\": \"open\"", "least 12 characters', 'type': 'value_error.any_str.min_length', 'ctx': { 'limit_value': 12 }", "'limit_value': 12 } }] } def test_post_orders3(): response = client.post('/orders/',", "[{ 'loc': ['body', 'quantity'], 'msg': 'value is not a valid", "integer', 'type': 'type_error.integer' }] } def test_post_orders7(): response = client.post('/orders/',", "}, 'loc': ['body', 'limit_price'], 'msg': 'ensure this value is greater", "\"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"SELL\", \"quantity\": 1.1, \"valid_until\": 1996950863", "response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.33333, \"side\": \"SELL\",", "'loc': ['body', 'side'], 'msg': \"value is not a valid enumeration", "\"valid_until\": 1996943663, \"status\": \"open\" } order_id = None def test_post_orders1():", "} def test_post_orders6(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\":", "\"buy\", \"quantity\": 1, \"valid_until\": 1996943663, \"status\": \"open\" } order_id =", "'ctx': { 'enum_values': ['buy', 'sell'] }, 'loc': ['body', 'side'], 'msg':", 
"this value is greater than 0', 'type': 'value_error.number.not_gt' }] }", "}] } def test_post_orders3(): response = client.post('/orders/', json={ \"isin\": \"blablablablabla\",", "j = response.json() #print(repr(j)) order_id = j.pop('uuid') assert j ==", "response.json() == { 'detail': [{ 'loc': ['body', 'quantity'], 'msg': 'value", "{ 'detail': [{ 'loc': ['body', 'isin'], 'msg': 'ensure this value", "is not a valid enumeration member; permitted: 'buy', 'sell'\", 'type':", "0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996943663, \"status\": \"open\" }", "response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"buy\",", "def test_post_orders2(): response = client.post('/orders/', json={ \"isin\": \"blablabla\", \"limit_price\": 0.2,", "422 assert response.json() == { 'detail': [{ 'loc': ['body', 'quantity'],", "\"blablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996943663, \"status\":", "'msg': \"value is not a valid enumeration member; permitted: 'buy',", "== 422 assert response.json() == { 'detail': [{ 'ctx': {", "\"side\": \"SELL\", \"quantity\": 2, \"valid_until\": 1996 }) assert response.status_code ==", "}) assert response.status_code == 422 assert response.json() == { 'detail':", "== { 'detail': [{ 'loc': ['body', 'isin'], 'msg': 'ensure this", "{ 'limit_value': 0 }, 'loc': ['body', 'limit_price'], 'msg': 'ensure this", "0 }, 'loc': ['body', 'limit_price'], 'msg': 'ensure this value is", "'quantity'], 'msg': 'value is not a valid integer', 'type': 'type_error.integer'", "'limit_value': 12 }, 'loc': ['body', 'isin'], 'msg': 'ensure this value", "\"blablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996950863 })", "'detail': [{ 'loc': ['body', 'isin'], 'msg': 'ensure this value has", "0', 'type': 'value_error.number.not_gt' }] } def test_post_orders8(): response = client.post('/orders/',", "test_post_orders4(): response = 
client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": -1, \"side\":", "from fast_lemon_api import app client = TestClient(app) def test_get_root(): response", "response.text == \"Welcome to the fast-lemon-api!\\n\" neworder = { \"isin\":", "client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"SELL\", \"quantity\": 2,", "client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"SELL\", \"quantity\": 1.1,", "}] } def test_post_orders6(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\",", "'loc': ['body', 'isin'], 'msg': 'ensure this value has at most", "response.json() == { 'detail': [{ 'ctx': { 'limit_value': 0 },", "== { 'detail': [{ 'loc': ['body', 'quantity'], 'msg': 'value is", "'enum_values': ['buy', 'sell'] }, 'loc': ['body', 'side'], 'msg': \"value is", "value has at least 12 characters', 'type': 'value_error.any_str.min_length', 'ctx': {", "'detail': [{ 'ctx': { 'limit_value': 12 }, 'loc': ['body', 'isin'],", "'value_error.number.not_gt' }] } def test_post_orders5(): response = client.post('/orders/', json={ \"isin\":", "'detail': [{ 'ctx': { 'limit_value': 0 }, 'loc': ['body', 'quantity'],", "422 assert response.json() == { 'detail': [{ 'loc': ['body', 'valid_until'],", "'side'], 'msg': \"value is not a valid enumeration member; permitted:", "\"quantity\": 0, \"valid_until\": 1996950863 }) assert response.status_code == 422 assert", "response = client.get(\"/\") assert response.status_code == 200 assert response.text ==", "import app client = TestClient(app) def test_get_root(): response = client.get(\"/\")", "def test_post_orders4(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": -1,", "-1, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996950863 }) assert response.status_code", "[{ 'loc': ['body', 'isin'], 'msg': 'ensure this value has at", "\"side\": \"SELL\", \"quantity\": 0, 
\"valid_until\": 1996950863 }) assert response.status_code ==", "200 assert response.text == \"Welcome to the fast-lemon-api!\\n\" neworder =", "def test_post_orders6(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.33333,", "def test_post_orders5(): response = client.post('/orders/', json={ \"isin\": \"blablablabla\", \"limit_price\": 0.2,", "{ 'detail': [{ 'ctx': { 'limit_value': 12 }, 'loc': ['body',", "}, 'loc': ['body', 'isin'], 'msg': 'ensure this value has at", "\"isin\": \"blablablabla\", \"limit_price\": 0.33333, \"side\": \"SELL\", \"quantity\": 0, \"valid_until\": 1996950863", "0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996943663, }) assert response.status_code", "2, \"valid_until\": 1996 }) assert response.status_code == 422 assert response.json()", "\"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1, \"valid_until\": 1996943663, }) assert", "422 assert response.json() == { 'detail': [{ 'ctx': { 'enum_values':", "\"quantity\": 1, \"valid_until\": 1996950863 }) assert response.status_code == 422 assert", "'value_error.any_str.min_length', 'ctx': { 'limit_value': 12 } }] } def test_post_orders3():", "response.json() #print(repr(j)) order_id = j.pop('uuid') assert j == neworder #assert", "{ 'limit_value': 12 }, 'loc': ['body', 'isin'], 'msg': 'ensure this", "= { \"isin\": \"blablablabla\", \"limit_price\": 0.2, \"side\": \"buy\", \"quantity\": 1," ]
[ "= get_devices_from_authfile(&cfg, username, dev, &n_devs); assert(rc == 1); assert(n_devs ==", "= 0)] line = subprocess.check_output([PUC, \"-n\", p2]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)',", "checks = \"\"\" assert(strcmp(dev[{i}].coseType, \"es256\") == 0); assert(strcmp(dev[{i}].keyHandle, \"{kh}\") ==", "\" + filename + \".templ\" line = subprocess.check_output([PUC, \"-u@USERNAME@\", r,", "\"../pamu2fcfg/pamu2fcfg\" resident = [\"\", \"-r\"] presence = [\"\", \"-P\"] pin", "{ssh}; rc = get_devices_from_authfile(&cfg, username, dev, &n_devs); assert(rc == 1);", "\"\"), (\"-P\", \"-P\")] for p1, p2 in options: filename =", "r + p + v + n print >> sys.stderr,", "\"Generating \" + filename + \".templ\" line = subprocess.check_output([PUC, \"-u@USERNAME@\",", "attributes = matches.group(3), oldformat = 0)] line = subprocess.check_output([PUC, \"-n\",", "credentials print >> sys.stderr, \"Generating single credentials\" for r in", "= subprocess.check_output([PUC, \"-u@USERNAME@\", p1]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with", "= \"../pamu2fcfg/pamu2fcfg\" resident = [\"\", \"-r\"] presence = [\"\", \"-P\"]", "n]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with open(filename + \".templ\",", "as outfile: outfile.write(line) credentials = [Credential(keyhandle = matches.group(1), pubkey =", "\"\"), (\"\", \"-P\"), (\"-P\", \"\"), (\"-P\", \"-P\")] for p1, p2", "+= free.format(i = c) code += free_block + end.format(devices =", "assert(rc == 1); assert(n_devs == {devices}); \"\"\" checks = \"\"\"", "sys PUC = \"../pamu2fcfg/pamu2fcfg\" resident = [\"\", \"-r\"] presence =", "\"-u@USERNAME@\", p1]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with open(filename +", "= 0 def print_test_case(filename, sshformat, credentials): start = \"\"\" cfg.auth_file", "v.keyhandle, pk = v.pubkey, attr = v.attributes, old = v.oldformat)", "import subprocess import sys PUC = \"../pamu2fcfg/pamu2fcfg\" 
resident = [\"\",", "subprocess import sys PUC = \"../pamu2fcfg/pamu2fcfg\" resident = [\"\", \"-r\"]", "resident = [\"\", \"-r\"] presence = [\"\", \"-P\"] pin =", "start = \"\"\" cfg.auth_file = \"{authfile}\"; cfg.sshformat = {ssh}; rc", "{old}); \"\"\" free = \"\"\" free(dev[{i}].coseType); free(dev[{i}].attributes); free(dev[{i}].keyHandle); free(dev[{i}].publicKey); \"\"\"", "p1]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with open(filename + \".templ\",", "re.M) with open(filename + \".templ\", \"w\") as outfile: outfile.write(line) credentials", "outfile: outfile.write(line) credentials = [Credential(keyhandle = matches.group(1), pubkey = matches.group(2),", "= matches.group(2), attributes = matches.group(3), oldformat = 0)] print_test_case(filename +", "= matches.group(1), pubkey = matches.group(2), attributes = matches.group(3), oldformat =", "import collections import re import subprocess import sys PUC =", "(\"\", \"-P\"), (\"-P\", \"\"), (\"-P\", \"-P\")] for p1, p2 in", "sshformat, credentials): start = \"\"\" cfg.auth_file = \"{authfile}\"; cfg.sshformat =", "filename = \"credentials/new_double_\" + r + p + v +", "pubkey = matches.group(2), attributes = matches.group(3), oldformat = 0)] line", "oldformat = 0)] print_test_case(filename + \".cred\", sshformat, credentials) # Double", "\"credentials/new_mixed_\" + p1 +\"1\" + p2 + \"2\" print >>", "pubkey attributes oldformat\") sshformat = 0 def print_test_case(filename, sshformat, credentials):", "[(\"\", \"\"), (\"\", \"-P\"), (\"-P\", \"\"), (\"-P\", \"-P\")] for p1,", "pubkey = matches.group(2), attributes = matches.group(3), oldformat = 0)] print_test_case(filename", "== 0); assert(strcmp(dev[{i}].attributes, \"{attr}\") == 0); assert(dev[{i}].old_format == {old}); \"\"\"", "= \"{authfile}\"; cfg.sshformat = {ssh}; rc = get_devices_from_authfile(&cfg, username, dev,", "line = subprocess.check_output([PUC, \"-u@USERNAME@\", r, p, v, n]) matches =", "free(dev[{i}].coseType); 
free(dev[{i}].attributes); free(dev[{i}].keyHandle); free(dev[{i}].publicKey); \"\"\" end = \"\"\" memset(dev, 0,", "\"{pk}\") == 0); assert(strcmp(dev[{i}].attributes, \"{attr}\") == 0); assert(dev[{i}].old_format == {old});", "v.pubkey, attr = v.attributes, old = v.oldformat) free_block += free.format(i", "\".templ\" line = subprocess.check_output([PUC, \"-u@USERNAME@\", r, p, v, n]) matches", "\"-V\"] Credential = collections.namedtuple(\"Credential\", \"keyhandle pubkey attributes oldformat\") sshformat =", "= sshformat, devices = len(credentials)) for c, v in enumerate(credentials):", "re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with open(filename + \".templ\", \"a\") as outfile:", "= \"\" code += start.format(authfile = filename, ssh = sshformat,", "\"{attr}\") == 0); assert(dev[{i}].old_format == {old}); \"\"\" free = \"\"\"", "matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with open(filename + \".templ\", \"w\")", "credentials print >> sys.stderr, \"Generating double credentials\" for r in", "0)] line = subprocess.check_output([PUC, \"-n\", r, p, v, n]) matches", "filename, ssh = sshformat, devices = len(credentials)) for c, v", "= v.pubkey, attr = v.attributes, old = v.oldformat) free_block +=", "= \"credentials/new_\" + r + p + v + n", "matches.group(2), attributes = matches.group(3), oldformat = 0)] print_test_case(filename + \".cred\",", "n print >> sys.stderr, \"Generating \" + filename + \".templ\"", "line = subprocess.check_output([PUC, \"-n\", r, p, v, n]) matches =", "in verification: filename = \"credentials/new_double_\" + r + p +", "\"w\") as outfile: outfile.write(line) credentials = [Credential(keyhandle = matches.group(1), pubkey", "pin: for v in verification: filename = \"credentials/new_double_\" + r", "= \"credentials/new_mixed_\" + p1 +\"1\" + p2 + \"2\" print", "\"\"\" end = \"\"\" memset(dev, 0, sizeof(dev_t) * {devices}); \"\"\"", "attributes = matches.group(3), oldformat = 0)] print_test_case(filename + 
\".cred\", sshformat,", "= collections.namedtuple(\"Credential\", \"keyhandle pubkey attributes oldformat\") sshformat = 0 def", "presence = [\"\", \"-P\"] pin = [\"\", \"-N\"] verification =", "= \"\"\" assert(strcmp(dev[{i}].coseType, \"es256\") == 0); assert(strcmp(dev[{i}].keyHandle, \"{kh}\") == 0);", "p2 in options: filename = \"credentials/new_mixed_\" + p1 +\"1\" +", "Credential = collections.namedtuple(\"Credential\", \"keyhandle pubkey attributes oldformat\") sshformat = 0", "= v.keyhandle, pk = v.pubkey, attr = v.attributes, old =", "outfile: outfile.write(line) credentials += [Credential(keyhandle = matches.group(1), pubkey = matches.group(2),", "+= checks.format(i = c, kh = v.keyhandle, pk = v.pubkey,", "pk = v.pubkey, attr = v.attributes, old = v.oldformat) free_block", "username, dev, &n_devs); assert(rc == 1); assert(n_devs == {devices}); \"\"\"", "credentials) # Double credentials print >> sys.stderr, \"Generating double credentials\"", "+= [Credential(keyhandle = matches.group(1), pubkey = matches.group(2), attributes = matches.group(3),", "len(credentials)) print(code) # Single credentials print >> sys.stderr, \"Generating single", "== 0); assert(strcmp(dev[{i}].keyHandle, \"{kh}\") == 0); assert(strcmp(dev[{i}].publicKey, \"{pk}\") == 0);", "\".templ\" line = subprocess.check_output([PUC, \"-u@USERNAME@\", p1]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line,", "code += checks.format(i = c, kh = v.keyhandle, pk =", "\".templ\", \"a\") as outfile: outfile.write(line) credentials += [Credential(keyhandle = matches.group(1),", "v.attributes, old = v.oldformat) free_block += free.format(i = c) code", "re.M) with open(filename + \".templ\", \"a\") as outfile: outfile.write(line) credentials", "sshformat = 0 def print_test_case(filename, sshformat, credentials): start = \"\"\"", "0, sizeof(dev_t) * {devices}); \"\"\" code = \"\" free_block =", "in verification: filename = \"credentials/new_\" + r + p +", "# Mixed credentials print >> 
sys.stderr, \"Mixed double credentials\" options", "[\"\", \"-N\"] verification = [\"\", \"-V\"] Credential = collections.namedtuple(\"Credential\", \"keyhandle", "assert(n_devs == {devices}); \"\"\" checks = \"\"\" assert(strcmp(dev[{i}].coseType, \"es256\") ==", "= matches.group(3), oldformat = 0)] line = subprocess.check_output([PUC, \"-n\", r,", "verification: filename = \"credentials/new_\" + r + p + v", "devices = len(credentials)) for c, v in enumerate(credentials): code +=", "= matches.group(2), attributes = matches.group(3), oldformat = 0)] line =", "v in enumerate(credentials): code += checks.format(i = c, kh =", "\"-P\")] for p1, p2 in options: filename = \"credentials/new_mixed_\" +", "oldformat = 0)] print_test_case(filename + \".cred\", sshformat, credentials) # Mixed", "free(dev[{i}].publicKey); \"\"\" end = \"\"\" memset(dev, 0, sizeof(dev_t) * {devices});", "sys.stderr, \"Generating single credentials\" for r in resident: for p", "\"{kh}\") == 0); assert(strcmp(dev[{i}].publicKey, \"{pk}\") == 0); assert(strcmp(dev[{i}].attributes, \"{attr}\") ==", "= \"\" free_block = \"\" code += start.format(authfile = filename,", "sys.stderr, \"Generating \" + filename + \".templ\" line = subprocess.check_output([PUC,", "Double credentials print >> sys.stderr, \"Generating double credentials\" for r", "= c, kh = v.keyhandle, pk = v.pubkey, attr =", "\"\"\" checks = \"\"\" assert(strcmp(dev[{i}].coseType, \"es256\") == 0); assert(strcmp(dev[{i}].keyHandle, \"{kh}\")", "0); assert(dev[{i}].old_format == {old}); \"\"\" free = \"\"\" free(dev[{i}].coseType); free(dev[{i}].attributes);", "= 0)] print_test_case(filename + \".cred\", sshformat, credentials) # Mixed credentials", "\"\"\" assert(strcmp(dev[{i}].coseType, \"es256\") == 0); assert(strcmp(dev[{i}].keyHandle, \"{kh}\") == 0); assert(strcmp(dev[{i}].publicKey,", "attr = v.attributes, old = v.oldformat) free_block += free.format(i =", "# Double credentials print >> sys.stderr, \"Generating double 
credentials\" for", "\"keyhandle pubkey attributes oldformat\") sshformat = 0 def print_test_case(filename, sshformat,", "filename = \"credentials/new_mixed_\" + p1 +\"1\" + p2 + \"2\"", "v.oldformat) free_block += free.format(i = c) code += free_block +", "= [\"\", \"-P\"] pin = [\"\", \"-N\"] verification = [\"\",", "sshformat, credentials) # Mixed credentials print >> sys.stderr, \"Mixed double", "import re import subprocess import sys PUC = \"../pamu2fcfg/pamu2fcfg\" resident", "\".cred\", sshformat, credentials) # Mixed credentials print >> sys.stderr, \"Mixed", "sshformat, credentials) # Double credentials print >> sys.stderr, \"Generating double", "\"2\" print >> sys.stderr, \"Generating \" + filename + \".templ\"", "assert(strcmp(dev[{i}].coseType, \"es256\") == 0); assert(strcmp(dev[{i}].keyHandle, \"{kh}\") == 0); assert(strcmp(dev[{i}].publicKey, \"{pk}\")", "filename + \".templ\" line = subprocess.check_output([PUC, \"-u@USERNAME@\", p1]) matches =", "re import subprocess import sys PUC = \"../pamu2fcfg/pamu2fcfg\" resident =", "\"-P\"] pin = [\"\", \"-N\"] verification = [\"\", \"-V\"] Credential", "\" + filename + \".templ\" line = subprocess.check_output([PUC, \"-u@USERNAME@\", p1])", "\"\"\" memset(dev, 0, sizeof(dev_t) * {devices}); \"\"\" code = \"\"", "old = v.oldformat) free_block += free.format(i = c) code +=", "free_block + end.format(devices = len(credentials)) print(code) # Single credentials print", "in pin: for v in verification: filename = \"credentials/new_\" +", "= \"\"\" free(dev[{i}].coseType); free(dev[{i}].attributes); free(dev[{i}].keyHandle); free(dev[{i}].publicKey); \"\"\" end = \"\"\"", "assert(dev[{i}].old_format == {old}); \"\"\" free = \"\"\" free(dev[{i}].coseType); free(dev[{i}].attributes); free(dev[{i}].keyHandle);", "for p1, p2 in options: filename = \"credentials/new_mixed_\" + p1", "\"credentials/new_\" + r + p + v + n print", "for p in presence: for n in pin: for v", "subprocess.check_output([PUC, \"-n\", r, p, v, 
n]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line,", "free.format(i = c) code += free_block + end.format(devices = len(credentials))", "re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with open(filename + \".templ\", \"w\") as outfile:", "oldformat\") sshformat = 0 def print_test_case(filename, sshformat, credentials): start =", "get_devices_from_authfile(&cfg, username, dev, &n_devs); assert(rc == 1); assert(n_devs == {devices});", "+ \".templ\" line = subprocess.check_output([PUC, \"-u@USERNAME@\", p1]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)',", "as outfile: outfile.write(line) credentials += [Credential(keyhandle = matches.group(1), pubkey =", "print >> sys.stderr, \"Generating single credentials\" for r in resident:", "for v in verification: filename = \"credentials/new_\" + r +", "credentials print >> sys.stderr, \"Mixed double credentials\" options = [(\"\",", "== 0); assert(strcmp(dev[{i}].publicKey, \"{pk}\") == 0); assert(strcmp(dev[{i}].attributes, \"{attr}\") == 0);", "free_block = \"\" code += start.format(authfile = filename, ssh =", "= {ssh}; rc = get_devices_from_authfile(&cfg, username, dev, &n_devs); assert(rc ==", "+ \".templ\" line = subprocess.check_output([PUC, \"-u@USERNAME@\", r, p, v, n])", "pin = [\"\", \"-N\"] verification = [\"\", \"-V\"] Credential =", "= \"credentials/new_double_\" + r + p + v + n", "line = subprocess.check_output([PUC, \"-u@USERNAME@\", p1]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M)", "= [Credential(keyhandle = matches.group(1), pubkey = matches.group(2), attributes = matches.group(3),", "+ \"2\" print >> sys.stderr, \"Generating \" + filename +", "\"\"\" free(dev[{i}].coseType); free(dev[{i}].attributes); free(dev[{i}].keyHandle); free(dev[{i}].publicKey); \"\"\" end = \"\"\" memset(dev,", "n in pin: for v in verification: filename = \"credentials/new_\"", "= matches.group(3), oldformat = 0)] line = subprocess.check_output([PUC, \"-n\", p2])", "0)] line = 
# Double credentials: same flag matrix, but each fixture holds two
# credentials — the second registered with -n (presumably "no username
# prefix", so it can be appended to the same authfile — TODO confirm).
print >> sys.stderr, "Generating double credentials"
for r in resident:
    for p in presence:
        for n in pin:
            for v in verification:
                filename = "credentials/new_double_" + r + p + v + n
                print >> sys.stderr, "Generating " + filename + ".templ"
                # First credential: full "username:..." line, written fresh.
                line = subprocess.check_output([PUC, "-u@USERNAME@", r, p, v, n])
                matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M)
                with open(filename + ".templ", "w") as outfile:
                    outfile.write(line)
                credentials = [Credential(keyhandle = matches.group(1),
                                          pubkey = matches.group(2),
                                          attributes = matches.group(3),
                                          oldformat = 0)]
                # Second credential: appended to the same .templ file.
                line = subprocess.check_output([PUC, "-n", r, p, v, n])
                matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M)
                with open(filename + ".templ", "a") as outfile:
                    outfile.write(line)
                credentials += [Credential(keyhandle = matches.group(1),
                                           pubkey = matches.group(2),
                                           attributes = matches.group(3),
                                           oldformat = 0)]
                print_test_case(filename + ".cred", sshformat, credentials)
filename", "p, v, n]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with open(filename", "\"\"\" code = \"\" free_block = \"\" code += start.format(authfile", "\"\" code += start.format(authfile = filename, ssh = sshformat, devices", ">> sys.stderr, \"Mixed double credentials\" options = [(\"\", \"\"), (\"\",", "options: filename = \"credentials/new_mixed_\" + p1 +\"1\" + p2 +", "checks.format(i = c, kh = v.keyhandle, pk = v.pubkey, attr", "+ p1 +\"1\" + p2 + \"2\" print >> sys.stderr,", "\"Generating double credentials\" for r in resident: for p in", "[\"\", \"-P\"] pin = [\"\", \"-N\"] verification = [\"\", \"-V\"]", "= v.attributes, old = v.oldformat) free_block += free.format(i = c)", "= 0)] print_test_case(filename + \".cred\", sshformat, credentials) # Double credentials", "credentials += [Credential(keyhandle = matches.group(1), pubkey = matches.group(2), attributes =", "+ \".templ\", \"w\") as outfile: outfile.write(line) credentials = [Credential(keyhandle =", "0)] print_test_case(filename + \".cred\", sshformat, credentials) # Mixed credentials print", ">> sys.stderr, \"Generating single credentials\" for r in resident: for", "double credentials\" for r in resident: for p in presence:", "p1 +\"1\" + p2 + \"2\" print >> sys.stderr, \"Generating", ">> sys.stderr, \"Generating double credentials\" for r in resident: for", "+ \".templ\", \"a\") as outfile: outfile.write(line) credentials += [Credential(keyhandle =", "== 1); assert(n_devs == {devices}); \"\"\" checks = \"\"\" assert(strcmp(dev[{i}].coseType,", "print >> sys.stderr, \"Generating \" + filename + \".templ\" line", "open(filename + \".templ\", \"w\") as outfile: outfile.write(line) credentials = [Credential(keyhandle", "= subprocess.check_output([PUC, \"-n\", r, p, v, n]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)',", "credentials\" for r in resident: for p in presence: for", "credentials) # Mixed credentials print >> sys.stderr, \"Mixed double credentials\"", "kh = 
v.keyhandle, pk = v.pubkey, attr = v.attributes, old", "v + n print >> sys.stderr, \"Generating \" + filename", "#!/bin/python2 import collections import re import subprocess import sys PUC", "presence: for n in pin: for v in verification: filename", "free(dev[{i}].keyHandle); free(dev[{i}].publicKey); \"\"\" end = \"\"\" memset(dev, 0, sizeof(dev_t) *", "\"-n\", p2]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with open(filename +", "r, p, v, n]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with", "Single credentials print >> sys.stderr, \"Generating single credentials\" for r", "for v in verification: filename = \"credentials/new_double_\" + r +", "in enumerate(credentials): code += checks.format(i = c, kh = v.keyhandle,", "import sys PUC = \"../pamu2fcfg/pamu2fcfg\" resident = [\"\", \"-r\"] presence", "matches.group(1), pubkey = matches.group(2), attributes = matches.group(3), oldformat = 0)]", "= matches.group(3), oldformat = 0)] print_test_case(filename + \".cred\", sshformat, credentials)", "+ \".cred\", sshformat, credentials) # Double credentials print >> sys.stderr,", "oldformat = 0)] line = subprocess.check_output([PUC, \"-n\", r, p, v,", "c, v in enumerate(credentials): code += checks.format(i = c, kh", "[\"\", \"-r\"] presence = [\"\", \"-P\"] pin = [\"\", \"-N\"]", "free(dev[{i}].attributes); free(dev[{i}].keyHandle); free(dev[{i}].publicKey); \"\"\" end = \"\"\" memset(dev, 0, sizeof(dev_t)", "line, re.M) with open(filename + \".templ\", \"a\") as outfile: outfile.write(line)", "cfg.auth_file = \"{authfile}\"; cfg.sshformat = {ssh}; rc = get_devices_from_authfile(&cfg, username,", "filename = \"credentials/new_\" + r + p + v +", "= re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with open(filename + \".templ\", \"a\") as", ">> sys.stderr, \"Generating \" + filename + \".templ\" line =", "in resident: for p in presence: for n in pin:", "= subprocess.check_output([PUC, \"-u@USERNAME@\", r, p, v, n]) matches = 
re.match(r'^.*?:(.*?),(.*?),es256,(.*)',", "\".templ\", \"w\") as outfile: outfile.write(line) credentials = [Credential(keyhandle = matches.group(1),", "\"\" free_block = \"\" code += start.format(authfile = filename, ssh", "open(filename + \".templ\", \"a\") as outfile: outfile.write(line) credentials += [Credential(keyhandle", "= subprocess.check_output([PUC, \"-n\", p2]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with", "cfg.sshformat = {ssh}; rc = get_devices_from_authfile(&cfg, username, dev, &n_devs); assert(rc", "== {devices}); \"\"\" checks = \"\"\" assert(strcmp(dev[{i}].coseType, \"es256\") == 0);", "\"es256\") == 0); assert(strcmp(dev[{i}].keyHandle, \"{kh}\") == 0); assert(strcmp(dev[{i}].publicKey, \"{pk}\") ==", "free = \"\"\" free(dev[{i}].coseType); free(dev[{i}].attributes); free(dev[{i}].keyHandle); free(dev[{i}].publicKey); \"\"\" end =", "code += free_block + end.format(devices = len(credentials)) print(code) # Single", "outfile.write(line) credentials = [Credential(keyhandle = matches.group(1), pubkey = matches.group(2), attributes", "\"-n\", r, p, v, n]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M)", "+ p + v + n print >> sys.stderr, \"Generating", "dev, &n_devs); assert(rc == 1); assert(n_devs == {devices}); \"\"\" checks", "matches.group(3), oldformat = 0)] line = subprocess.check_output([PUC, \"-n\", p2]) matches", "in pin: for v in verification: filename = \"credentials/new_double_\" +", "print_test_case(filename + \".cred\", sshformat, credentials) # Double credentials print >>", "verification: filename = \"credentials/new_double_\" + r + p + v", "{devices}); \"\"\" code = \"\" free_block = \"\" code +=", "n in pin: for v in verification: filename = \"credentials/new_double_\"", "v in verification: filename = \"credentials/new_\" + r + p", "+= free_block + end.format(devices = len(credentials)) print(code) # Single credentials", "options = [(\"\", \"\"), (\"\", \"-P\"), (\"-P\", \"\"), (\"-P\", 
\"-P\")]", "+ filename + \".templ\" line = subprocess.check_output([PUC, \"-u@USERNAME@\", p1]) matches", "start.format(authfile = filename, ssh = sshformat, devices = len(credentials)) for", "\"-u@USERNAME@\", r, p, v, n]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M)", "def print_test_case(filename, sshformat, credentials): start = \"\"\" cfg.auth_file = \"{authfile}\";", "v, n]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with open(filename +", "1); assert(n_devs == {devices}); \"\"\" checks = \"\"\" assert(strcmp(dev[{i}].coseType, \"es256\")", "collections.namedtuple(\"Credential\", \"keyhandle pubkey attributes oldformat\") sshformat = 0 def print_test_case(filename,", "sys.stderr, \"Mixed double credentials\" options = [(\"\", \"\"), (\"\", \"-P\"),", "for n in pin: for v in verification: filename =", "= v.oldformat) free_block += free.format(i = c) code += free_block", "assert(strcmp(dev[{i}].keyHandle, \"{kh}\") == 0); assert(strcmp(dev[{i}].publicKey, \"{pk}\") == 0); assert(strcmp(dev[{i}].attributes, \"{attr}\")", "assert(strcmp(dev[{i}].publicKey, \"{pk}\") == 0); assert(strcmp(dev[{i}].attributes, \"{attr}\") == 0); assert(dev[{i}].old_format ==", "print_test_case(filename + \".cred\", sshformat, credentials) # Mixed credentials print >>", "+ p2 + \"2\" print >> sys.stderr, \"Generating \" +", "\"\"\" cfg.auth_file = \"{authfile}\"; cfg.sshformat = {ssh}; rc = get_devices_from_authfile(&cfg,", "for r in resident: for p in presence: for n", "\"a\") as outfile: outfile.write(line) credentials += [Credential(keyhandle = matches.group(1), pubkey", "attributes oldformat\") sshformat = 0 def print_test_case(filename, sshformat, credentials): start", "assert(strcmp(dev[{i}].attributes, \"{attr}\") == 0); assert(dev[{i}].old_format == {old}); \"\"\" free =", "print_test_case(filename, sshformat, credentials): start = \"\"\" cfg.auth_file = \"{authfile}\"; cfg.sshformat", "c) code += free_block + end.format(devices = 
len(credentials)) print(code) #", "with open(filename + \".templ\", \"w\") as outfile: outfile.write(line) credentials =", "\"-N\"] verification = [\"\", \"-V\"] Credential = collections.namedtuple(\"Credential\", \"keyhandle pubkey", "= [\"\", \"-r\"] presence = [\"\", \"-P\"] pin = [\"\",", "sys.stderr, \"Generating double credentials\" for r in resident: for p", "end = \"\"\" memset(dev, 0, sizeof(dev_t) * {devices}); \"\"\" code", "resident: for p in presence: for n in pin: for", "p2 + \"2\" print >> sys.stderr, \"Generating \" + filename", "\"\"\" free = \"\"\" free(dev[{i}].coseType); free(dev[{i}].attributes); free(dev[{i}].keyHandle); free(dev[{i}].publicKey); \"\"\" end", "= filename, ssh = sshformat, devices = len(credentials)) for c,", "subprocess.check_output([PUC, \"-u@USERNAME@\", r, p, v, n]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line,", "+ n print >> sys.stderr, \"Generating \" + filename +", "enumerate(credentials): code += checks.format(i = c, kh = v.keyhandle, pk", "0 def print_test_case(filename, sshformat, credentials): start = \"\"\" cfg.auth_file =", "+ \".cred\", sshformat, credentials) # Mixed credentials print >> sys.stderr,", "line = subprocess.check_output([PUC, \"-n\", p2]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M)", "== {old}); \"\"\" free = \"\"\" free(dev[{i}].coseType); free(dev[{i}].attributes); free(dev[{i}].keyHandle); free(dev[{i}].publicKey);", "subprocess.check_output([PUC, \"-u@USERNAME@\", p1]) matches = re.match(r'^.*?:(.*?),(.*?),es256,(.*)', line, re.M) with open(filename", "\"{authfile}\"; cfg.sshformat = {ssh}; rc = get_devices_from_authfile(&cfg, username, dev, &n_devs);", "= \"\"\" cfg.auth_file = \"{authfile}\"; cfg.sshformat = {ssh}; rc =", "0); assert(strcmp(dev[{i}].keyHandle, \"{kh}\") == 0); assert(strcmp(dev[{i}].publicKey, \"{pk}\") == 0); assert(strcmp(dev[{i}].attributes,", "matches.group(2), attributes = matches.group(3), oldformat = 0)] line = 
subprocess.check_output([PUC,", "outfile.write(line) credentials += [Credential(keyhandle = matches.group(1), pubkey = matches.group(2), attributes", "* {devices}); \"\"\" code = \"\" free_block = \"\" code", "r in resident: for p in presence: for n in", "p + v + n print >> sys.stderr, \"Generating \"" ]
[ "= np.float64([1.]) # One batch, scalar. # Corresponds to scale", "# ============================================================================== \"\"\"Affine Scalar Tests.\"\"\" from __future__ import absolute_import from", "2.0 (the \"License\"); # you may not use this file", "mu = -1. # Corresponds to scale = 2 bijector", "mu = np.float64([1.]) # One batch, scalar. # Corresponds to", "AffineScalar(shift=mu) x = [1., 1] # One sample from each", "self.assertAllClose([0.], run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesScaleOnly(self): with self.test_session() as sess: def", "x_value}) for run in (static_run, dynamic_run): mu = np.float64([1.]) #", "two batches. self.assertAllClose([3., 0], run(bijector.forward, x)) self.assertAllClose([0., 2], run(bijector.inverse, x))", "1. bijector = AffineScalar(shift=mu) x = [1., 1] # One", "from tensorflow.python.platform import test class AffineScalarBijectorTest(test.TestCase): \"\"\"Tests correctness of the", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "run(bijector.forward, x)) self.assertAllClose([0.], run(bijector.inverse, x)) self.assertAllClose([0.], run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesScaleOnly(self):", "for run in (static_run, dynamic_run): mu = -1. # Corresponds", "def dynamic_run(fun, x_value): x_value = np.array(x_value).astype(np.float64) x = array_ops.placeholder(dtypes.float64, name=\"x\")", "Tests.\"\"\" from __future__ import absolute_import from __future__ import division from", "def testScalarCongruency(self): with self.test_session(): bijector = AffineScalar(shift=3.6, scale=0.42) assert_scalar_congruency(bijector, lower_x=-2.,", "x = np.float64([1.]) # One sample from one batches. 
self.assertAllClose([2.],", "correctness of the Y = scale @ x + shift", "array_ops.placeholder(dtypes.float32, name=\"x\") return sess.run(fun(x), feed_dict={x: x_value}) for run in (static_run,", "self.assertAllClose([1., 3, 5], run(bijector.forward, x)) self.assertAllClose([1., 1.5, 2.], run(bijector.inverse, x))", "testProperties(self): with self.test_session(): mu = -1. # scale corresponds to", "scale corresponds to 1. bijector = AffineScalar(shift=mu) self.assertEqual(\"affine_scalar\", bijector.name) def", "np.float64([2.]) # One batch, scalar. # Corresponds to scale =", "1. bijector = AffineScalar(shift=mu) self.assertEqual(\"affine_scalar\", bijector.name) def testNoBatchScalar(self): with self.test_session()", "of two batches. self.assertAllClose([3., 0], run(bijector.forward, x)) self.assertAllClose([0., 2], run(bijector.inverse,", "use this file except in compliance with the License. #", "from each of two batches. self.assertAllClose([3., 0], run(bijector.forward, x)) self.assertAllClose([0.,", "scalar. # Corresponds to scale = 1. bijector = AffineScalar(shift=mu)", "dynamic_run): mu = -1. # Corresponds to scale = 2", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "in (static_run, dynamic_run): mu = np.float64([1.]) # One batch, scalar.", "2, 3] # Three scalar samples (no batches). self.assertAllClose([1., 3,", "License. # You may obtain a copy of the License", "The TensorFlow Authors. All Rights Reserved. 
# # Licensed under", "self.assertAllClose([-np.log(2.)] * 3, run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesShiftOnly(self): with self.test_session() as", "testTwoBatchScalarIdentityViaScale(self): with self.test_session() as sess: def static_run(fun, x): return fun(x).eval()", "return sess.run(fun(x), feed_dict={x: x_value}) for run in (static_run, dynamic_run): multiplier", "under the License is distributed on an \"AS IS\" BASIS,", "License for the specific language governing permissions and # limitations", "Corresponds to scale = 1. bijector = AffineScalar(shift=mu, scale=[2., 1])", "as sess: def static_run(fun, x): return fun(x).eval() def dynamic_run(fun, x_value):", "@ x + shift transformation.\"\"\" def testProperties(self): with self.test_session(): mu", "Reserved. # # Licensed under the Apache License, Version 2.0", "bijector = AffineScalar(shift=mu, scale=[2., 1]) x = [1., 1] #", "governing permissions and # limitations under the License. # ==============================================================================", "= 2, shift = 0. bijector = AffineScalar(scale=multiplier) x =", "import array_ops from tensorflow.python.ops.distributions.bijector_test_util import assert_scalar_congruency from tensorflow.python.platform import test", "run in (static_run, dynamic_run): multiplier = np.float64([2.]) # One batch,", "from __future__ import division from __future__ import print_function import numpy", "Copyright 2016 The TensorFlow Authors. All Rights Reserved. # #", "# One sample from each of two batches. self.assertAllClose([2., 0],", "TensorFlow Authors. All Rights Reserved. # # Licensed under the", "import division from __future__ import print_function import numpy as np", "run in (static_run, dynamic_run): mu = np.float64([1.]) # One batch,", "dynamic_run): mu = [1., -1] # Univariate, two batches. 
#", "self.assertAllClose( [-np.log(2), 0.], run(bijector.inverse_log_det_jacobian, x)) def testScalarCongruency(self): with self.test_session(): bijector", "from __future__ import absolute_import from __future__ import division from __future__", "in compliance with the License. # You may obtain a", "def dynamic_run(fun, x_value): x_value = np.array(x_value) x = array_ops.placeholder(dtypes.float32, name=\"x\")", "software # distributed under the License is distributed on an", "import AffineScalar from tensorflow.python.framework import dtypes from tensorflow.python.ops import array_ops", "1] # One sample from each of two batches. self.assertAllClose([2.,", "self.test_session(): mu = -1. # scale corresponds to 1. bijector", "two batches. self.assertAllClose([2., 0], run(bijector.forward, x)) self.assertAllClose([0., 2], run(bijector.inverse, x))", "# One sample from each of two batches. self.assertAllClose([3., 0],", "-1. # Corresponds to scale = 2 bijector = AffineScalar(shift=mu,", "-1] # Univariate, two batches. # Corresponds to scale =", "from tensorflow.python.framework import dtypes from tensorflow.python.ops import array_ops from tensorflow.python.ops.distributions.bijector_test_util", "batch, scalar. # Corresponds to scale = 2, shift =", "= array_ops.placeholder(dtypes.float32, name=\"x\") return sess.run(fun(x), feed_dict={x: x_value}) for run in", "division from __future__ import print_function import numpy as np from", "= [1., 2, 3] # Three scalar samples (no batches).", "batches. self.assertAllClose([2., 0], run(bijector.forward, x)) self.assertAllClose([0., 2], run(bijector.inverse, x)) self.assertAllClose([0.,", "self.assertAllClose([0., 2], run(bijector.inverse, x)) self.assertAllClose( [-np.log(2), 0.], run(bijector.inverse_log_det_jacobian, x)) def", "dynamic_run): mu = np.float64([1.]) # One batch, scalar. # Corresponds", "bijector = AffineScalar(shift=3.6, scale=0.42) assert_scalar_congruency(bijector, lower_x=-2., upper_x=2.) 
if __name__ ==", "def testTwoBatchScalarIdentityViaIdentity(self): with self.test_session() as sess: def static_run(fun, x): return", "[1., 2, 3] # Three scalar samples (no batches). self.assertAllClose([1.,", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "tensorflow.contrib.distributions.python.ops.bijectors.affine_scalar import AffineScalar from tensorflow.python.framework import dtypes from tensorflow.python.ops import", "2, shift = 0. bijector = AffineScalar(scale=multiplier) x = np.float64([1.])", "import test class AffineScalarBijectorTest(test.TestCase): \"\"\"Tests correctness of the Y =", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "# Corresponds to scale = 2 bijector = AffineScalar(shift=mu, scale=2.)", "Corresponds to scale = 2 bijector = AffineScalar(shift=mu, scale=2.) x", "limitations under the License. # ============================================================================== \"\"\"Affine Scalar Tests.\"\"\" from", "to in writing, software # distributed under the License is", "scale = 1. bijector = AffineScalar(shift=mu, scale=[2., 1]) x =", "self.assertAllClose([3., 0], run(bijector.forward, x)) self.assertAllClose([0., 2], run(bijector.inverse, x)) self.assertAllClose( [-np.log(2),", "# See the License for the specific language governing permissions", "language governing permissions and # limitations under the License. #", "or agreed to in writing, software # distributed under the", "run in (static_run, dynamic_run): mu = -1. # Corresponds to", "required by applicable law or agreed to in writing, software", "to scale = 1. 
bijector = AffineScalar(shift=mu) x = np.float64([1.])", "# One sample from one batches. self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.],", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "testOneBatchScalarViaIdentityIn64BitUserProvidesShiftOnly(self): with self.test_session() as sess: def static_run(fun, x): return fun(x).eval()", "with the License. # You may obtain a copy of", "[1., -1] # Univariate, two batches. # Corresponds to scale", "bijector = AffineScalar(shift=mu) self.assertEqual(\"affine_scalar\", bijector.name) def testNoBatchScalar(self): with self.test_session() as", "= AffineScalar(shift=mu, scale=2.) x = [1., 2, 3] # Three", "1. bijector = AffineScalar(shift=mu, scale=[2., 1]) x = [1., 1]", "One batch, scalar. # Corresponds to scale = 2, shift", "= AffineScalar(shift=mu, scale=[2., 1]) x = [1., 1] # One", "x = array_ops.placeholder(dtypes.float32, name=\"x\") return sess.run(fun(x), feed_dict={x: x_value}) for run", "compliance with the License. # You may obtain a copy", "All Rights Reserved. # # Licensed under the Apache License,", "agreed to in writing, software # distributed under the License", "= AffineScalar(shift=mu) self.assertEqual(\"affine_scalar\", bijector.name) def testNoBatchScalar(self): with self.test_session() as sess:", "for run in (static_run, dynamic_run): mu = np.float64([1.]) # One", "License. # ============================================================================== \"\"\"Affine Scalar Tests.\"\"\" from __future__ import absolute_import", "x)) self.assertAllClose([0.], run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesScaleOnly(self): with self.test_session() as sess:", "distributed under the License is distributed on an \"AS IS\"", "batches. 
self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.5], run(bijector.inverse, x)) self.assertAllClose([np.log(0.5)], run(bijector.inverse_log_det_jacobian, x))", "with self.test_session() as sess: def static_run(fun, x): return fun(x).eval() def", "express or implied. # See the License for the specific", "except in compliance with the License. # You may obtain", "to 1. bijector = AffineScalar(shift=mu) self.assertEqual(\"affine_scalar\", bijector.name) def testNoBatchScalar(self): with", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "mu = [1., -1] # Univariate, two batches. # Corresponds", "not use this file except in compliance with the License.", "shift transformation.\"\"\" def testProperties(self): with self.test_session(): mu = -1. #", "dynamic_run(fun, x_value): x_value = np.array(x_value) x = array_ops.placeholder(dtypes.float32, name=\"x\") return", "= -1. # scale corresponds to 1. bijector = AffineScalar(shift=mu)", "static_run(fun, x): return fun(x).eval() def dynamic_run(fun, x_value): x_value = np.array(x_value).astype(np.float64)", "run(bijector.forward, x)) self.assertAllClose([0., 2], run(bijector.inverse, x)) self.assertAllClose([0., 0.], run(bijector.inverse_log_det_jacobian, x))", "x = [1., 1] # One sample from each of", "x)) def testTwoBatchScalarIdentityViaScale(self): with self.test_session() as sess: def static_run(fun, x):", "writing, software # distributed under the License is distributed on", "self.assertAllClose([0., 2], run(bijector.inverse, x)) self.assertAllClose([0., 0.], run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaScale(self):", "you may not use this file except in compliance with", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "for run in (static_run, dynamic_run): mu = [1., -1] #", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "fun(x).eval() def dynamic_run(fun, x_value): x_value = np.array(x_value) x = array_ops.placeholder(dtypes.float32,", "scale @ x + shift transformation.\"\"\" def testProperties(self): with self.test_session():", "with self.test_session(): mu = -1. # scale corresponds to 1.", "def testProperties(self): with self.test_session(): mu = -1. # scale corresponds", "run(bijector.inverse, x)) self.assertAllClose([0.], run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesScaleOnly(self): with self.test_session() as", "2 bijector = AffineScalar(shift=mu, scale=2.) x = [1., 2, 3]", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaScale(self): with self.test_session() as sess: def static_run(fun,", "def static_run(fun, x): return fun(x).eval() def dynamic_run(fun, x_value): x_value =", "self.assertAllClose([1., 1.5, 2.], run(bijector.inverse, x)) self.assertAllClose([-np.log(2.)] * 3, run(bijector.inverse_log_det_jacobian, x))", "multiplier = np.float64([2.]) # One batch, scalar. # Corresponds to", "bijector = AffineScalar(shift=mu) x = np.float64([1.]) # One sample from", "(static_run, dynamic_run): mu = np.float64([1.]) # One batch, scalar. #", "x)) self.assertAllClose([0.5], run(bijector.inverse, x)) self.assertAllClose([np.log(0.5)], run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaIdentity(self): with", "scale = 1. bijector = AffineScalar(shift=mu) x = [1., 1]", "5], run(bijector.forward, x)) self.assertAllClose([1., 1.5, 2.], run(bijector.inverse, x)) self.assertAllClose([-np.log(2.)] *", "def testOneBatchScalarViaIdentityIn64BitUserProvidesScaleOnly(self): with self.test_session() as sess: def static_run(fun, x): return", "# One sample from one batches. 
self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.5],", "x)) self.assertAllClose([0., 0.], run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaScale(self): with self.test_session() as", "One sample from one batches. self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.5], run(bijector.inverse,", "self.assertAllClose([0., 0.], run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaScale(self): with self.test_session() as sess:", "import print_function import numpy as np from tensorflow.contrib.distributions.python.ops.bijectors.affine_scalar import AffineScalar", "+ shift transformation.\"\"\" def testProperties(self): with self.test_session(): mu = -1.", "from tensorflow.python.ops.distributions.bijector_test_util import assert_scalar_congruency from tensorflow.python.platform import test class AffineScalarBijectorTest(test.TestCase):", "= AffineScalar(shift=mu) x = np.float64([1.]) # One sample from one", "for run in (static_run, dynamic_run): multiplier = np.float64([2.]) # One", "sample from one batches. self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.5], run(bijector.inverse, x))", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "0], run(bijector.forward, x)) self.assertAllClose([0., 2], run(bijector.inverse, x)) self.assertAllClose([0., 0.], run(bijector.inverse_log_det_jacobian,", "the License is distributed on an \"AS IS\" BASIS, #", "x)) def testScalarCongruency(self): with self.test_session(): bijector = AffineScalar(shift=3.6, scale=0.42) assert_scalar_congruency(bijector,", "feed_dict={x: x_value}) for run in (static_run, dynamic_run): multiplier = np.float64([2.])", "self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.], run(bijector.inverse, x)) self.assertAllClose([0.], run(bijector.inverse_log_det_jacobian, x)) def", "AffineScalar(scale=multiplier) x = np.float64([1.]) # One sample from one batches.", "in (static_run, dynamic_run): mu = [1., -1] # Univariate, two", "mu = -1. # scale corresponds to 1. bijector =", "3, run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesShiftOnly(self): with self.test_session() as sess: def", "in (static_run, dynamic_run): multiplier = np.float64([2.]) # One batch, scalar.", "self.test_session() as sess: def static_run(fun, x): return fun(x).eval() def dynamic_run(fun,", "sess.run(fun(x), feed_dict={x: x_value}) for run in (static_run, dynamic_run): multiplier =", "static_run(fun, x): return fun(x).eval() def dynamic_run(fun, x_value): x_value = np.array(x_value)", "from tensorflow.contrib.distributions.python.ops.bijectors.affine_scalar import AffineScalar from tensorflow.python.framework import dtypes from tensorflow.python.ops", "x)) self.assertAllClose([1., 1.5, 2.], run(bijector.inverse, x)) self.assertAllClose([-np.log(2.)] * 3, run(bijector.inverse_log_det_jacobian,", "run(bijector.forward, x)) self.assertAllClose([0., 2], run(bijector.inverse, x)) self.assertAllClose( [-np.log(2), 0.], run(bijector.inverse_log_det_jacobian,", "AffineScalar(shift=mu, scale=[2., 1]) x = [1., 1] # One sample", "= 1. 
bijector = AffineScalar(shift=mu, scale=[2., 1]) x = [1.,", "scale=[2., 1]) x = [1., 1] # One sample from", "tensorflow.python.platform import test class AffineScalarBijectorTest(test.TestCase): \"\"\"Tests correctness of the Y", "law or agreed to in writing, software # distributed under", "2], run(bijector.inverse, x)) self.assertAllClose([0., 0.], run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaScale(self): with", "x + shift transformation.\"\"\" def testProperties(self): with self.test_session(): mu =", "0. bijector = AffineScalar(scale=multiplier) x = np.float64([1.]) # One sample", "= [1., -1] # Univariate, two batches. # Corresponds to", "x = array_ops.placeholder(dtypes.float64, name=\"x\") return sess.run(fun(x), feed_dict={x: x_value}) for run", "batches. self.assertAllClose([3., 0], run(bijector.forward, x)) self.assertAllClose([0., 2], run(bijector.inverse, x)) self.assertAllClose(", "0.], run(bijector.inverse_log_det_jacobian, x)) def testScalarCongruency(self): with self.test_session(): bijector = AffineScalar(shift=3.6,", "3] # Three scalar samples (no batches). self.assertAllClose([1., 3, 5],", "sess.run(fun(x), feed_dict={x: x_value}) for run in (static_run, dynamic_run): mu =", "# Corresponds to scale = 1. bijector = AffineScalar(shift=mu, scale=[2.,", "x_value}) for run in (static_run, dynamic_run): mu = -1. #", "One sample from one batches. self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.], run(bijector.inverse,", "= AffineScalar(shift=3.6, scale=0.42) assert_scalar_congruency(bijector, lower_x=-2., upper_x=2.) if __name__ == \"__main__\":", "bijector = AffineScalar(shift=mu) x = [1., 1] # One sample", "may obtain a copy of the License at # #", "Univariate, two batches. # Corresponds to scale = 1. bijector", "return fun(x).eval() def dynamic_run(fun, x_value): x_value = np.array(x_value) x =", "scale = 1. 
bijector = AffineScalar(shift=mu) x = np.float64([1.]) #", "[1., 1] # One sample from each of two batches.", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "np from tensorflow.contrib.distributions.python.ops.bijectors.affine_scalar import AffineScalar from tensorflow.python.framework import dtypes from", "run(bijector.inverse, x)) self.assertAllClose([0., 0.], run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaScale(self): with self.test_session()", "x)) self.assertAllClose([0., 2], run(bijector.inverse, x)) self.assertAllClose([0., 0.], run(bijector.inverse_log_det_jacobian, x)) def", "Corresponds to scale = 2, shift = 0. bijector =", "__future__ import print_function import numpy as np from tensorflow.contrib.distributions.python.ops.bijectors.affine_scalar import", "run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesShiftOnly(self): with self.test_session() as sess: def static_run(fun,", "may not use this file except in compliance with the", "-1. # scale corresponds to 1. bijector = AffineScalar(shift=mu) self.assertEqual(\"affine_scalar\",", "# Three scalar samples (no batches). self.assertAllClose([1., 3, 5], run(bijector.forward,", "(static_run, dynamic_run): mu = -1. # Corresponds to scale =", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "print_function import numpy as np from tensorflow.contrib.distributions.python.ops.bijectors.affine_scalar import AffineScalar from", "this file except in compliance with the License. # You", "tensorflow.python.framework import dtypes from tensorflow.python.ops import array_ops from tensorflow.python.ops.distributions.bijector_test_util import", "batches). self.assertAllClose([1., 3, 5], run(bijector.forward, x)) self.assertAllClose([1., 1.5, 2.], run(bijector.inverse,", "= 1. 
bijector = AffineScalar(shift=mu) x = [1., 1] #", "of the Y = scale @ x + shift transformation.\"\"\"", "import absolute_import from __future__ import division from __future__ import print_function", "= np.array(x_value) x = array_ops.placeholder(dtypes.float32, name=\"x\") return sess.run(fun(x), feed_dict={x: x_value})", "2], run(bijector.inverse, x)) self.assertAllClose( [-np.log(2), 0.], run(bijector.inverse_log_det_jacobian, x)) def testScalarCongruency(self):", "x)) self.assertAllClose([np.log(0.5)], run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaIdentity(self): with self.test_session() as sess:", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "run(bijector.inverse, x)) self.assertAllClose([-np.log(2.)] * 3, run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesShiftOnly(self): with", "# # Licensed under the Apache License, Version 2.0 (the", "test class AffineScalarBijectorTest(test.TestCase): \"\"\"Tests correctness of the Y = scale", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "testNoBatchScalar(self): with self.test_session() as sess: def static_run(fun, x): return fun(x).eval()", "array_ops from tensorflow.python.ops.distributions.bijector_test_util import assert_scalar_congruency from tensorflow.python.platform import test class", "Three scalar samples (no batches). self.assertAllClose([1., 3, 5], run(bijector.forward, x))", "corresponds to 1. bijector = AffineScalar(shift=mu) self.assertEqual(\"affine_scalar\", bijector.name) def testNoBatchScalar(self):", "shift = 0. 
bijector = AffineScalar(scale=multiplier) x = np.float64([1.]) #", "tensorflow.python.ops import array_ops from tensorflow.python.ops.distributions.bijector_test_util import assert_scalar_congruency from tensorflow.python.platform import", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "two batches. # Corresponds to scale = 1. bijector =", "0], run(bijector.forward, x)) self.assertAllClose([0., 2], run(bijector.inverse, x)) self.assertAllClose( [-np.log(2), 0.],", "One batch, scalar. # Corresponds to scale = 1. bijector", "from __future__ import print_function import numpy as np from tensorflow.contrib.distributions.python.ops.bijectors.affine_scalar", "batches. self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.], run(bijector.inverse, x)) self.assertAllClose([0.], run(bijector.inverse_log_det_jacobian, x))", "feed_dict={x: x_value}) for run in (static_run, dynamic_run): mu = [1.,", "import assert_scalar_congruency from tensorflow.python.platform import test class AffineScalarBijectorTest(test.TestCase): \"\"\"Tests correctness", "bijector.name) def testNoBatchScalar(self): with self.test_session() as sess: def static_run(fun, x):", "Scalar Tests.\"\"\" from __future__ import absolute_import from __future__ import division", "np.float64([1.]) # One sample from one batches. 
self.assertAllClose([2.], run(bijector.forward, x))", "= AffineScalar(shift=mu) x = [1., 1] # One sample from", "import dtypes from tensorflow.python.ops import array_ops from tensorflow.python.ops.distributions.bijector_test_util import assert_scalar_congruency", "[-np.log(2), 0.], run(bijector.inverse_log_det_jacobian, x)) def testScalarCongruency(self): with self.test_session(): bijector =", "self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.5], run(bijector.inverse, x)) self.assertAllClose([np.log(0.5)], run(bijector.inverse_log_det_jacobian, x)) def", "\"\"\"Affine Scalar Tests.\"\"\" from __future__ import absolute_import from __future__ import", "============================================================================== \"\"\"Affine Scalar Tests.\"\"\" from __future__ import absolute_import from __future__", "x)) self.assertAllClose([0., 2], run(bijector.inverse, x)) self.assertAllClose( [-np.log(2), 0.], run(bijector.inverse_log_det_jacobian, x))", "x_value): x_value = np.array(x_value).astype(np.float64) x = array_ops.placeholder(dtypes.float64, name=\"x\") return sess.run(fun(x),", "1.5, 2.], run(bijector.inverse, x)) self.assertAllClose([-np.log(2.)] * 3, run(bijector.inverse_log_det_jacobian, x)) def", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "1. bijector = AffineScalar(shift=mu) x = np.float64([1.]) # One sample", "or implied. # See the License for the specific language", "Rights Reserved. # # Licensed under the Apache License, Version", "2016 The TensorFlow Authors. All Rights Reserved. # # Licensed", "np.array(x_value).astype(np.float64) x = array_ops.placeholder(dtypes.float64, name=\"x\") return sess.run(fun(x), feed_dict={x: x_value}) for", "KIND, either express or implied. 
# See the License for", "specific language governing permissions and # limitations under the License.", "feed_dict={x: x_value}) for run in (static_run, dynamic_run): mu = np.float64([1.])", "in (static_run, dynamic_run): mu = -1. # Corresponds to scale", "x_value): x_value = np.array(x_value) x = array_ops.placeholder(dtypes.float32, name=\"x\") return sess.run(fun(x),", "one batches. self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.], run(bijector.inverse, x)) self.assertAllClose([0.], run(bijector.inverse_log_det_jacobian,", "One sample from each of two batches. self.assertAllClose([2., 0], run(bijector.forward,", "under the License. # ============================================================================== \"\"\"Affine Scalar Tests.\"\"\" from __future__", "self.assertAllClose([2., 0], run(bijector.forward, x)) self.assertAllClose([0., 2], run(bijector.inverse, x)) self.assertAllClose([0., 0.],", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "samples (no batches). self.assertAllClose([1., 3, 5], run(bijector.forward, x)) self.assertAllClose([1., 1.5,", "x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesShiftOnly(self): with self.test_session() as sess: def static_run(fun, x):", "\"\"\"Tests correctness of the Y = scale @ x +", "import numpy as np from tensorflow.contrib.distributions.python.ops.bijectors.affine_scalar import AffineScalar from tensorflow.python.framework", "sample from each of two batches. 
self.assertAllClose([3., 0], run(bijector.forward, x))", "x = [1., 2, 3] # Three scalar samples (no", "(the \"License\"); # you may not use this file except", "self.assertAllClose([0.], run(bijector.inverse, x)) self.assertAllClose([0.], run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesScaleOnly(self): with self.test_session()", "# you may not use this file except in compliance", "def testOneBatchScalarViaIdentityIn64BitUserProvidesShiftOnly(self): with self.test_session() as sess: def static_run(fun, x): return", "= 0. bijector = AffineScalar(scale=multiplier) x = np.float64([1.]) # One", "def testTwoBatchScalarIdentityViaScale(self): with self.test_session() as sess: def static_run(fun, x): return", "to scale = 2 bijector = AffineScalar(shift=mu, scale=2.) x =", "x)) def testTwoBatchScalarIdentityViaIdentity(self): with self.test_session() as sess: def static_run(fun, x):", "each of two batches. self.assertAllClose([2., 0], run(bijector.forward, x)) self.assertAllClose([0., 2],", "# # Unless required by applicable law or agreed to", "and # limitations under the License. # ============================================================================== \"\"\"Affine Scalar", "AffineScalar(shift=mu, scale=2.) x = [1., 2, 3] # Three scalar", "testOneBatchScalarViaIdentityIn64BitUserProvidesScaleOnly(self): with self.test_session() as sess: def static_run(fun, x): return fun(x).eval()", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "x_value}) for run in (static_run, dynamic_run): mu = [1., -1]", "Version 2.0 (the \"License\"); # you may not use this", "self.test_session(): bijector = AffineScalar(shift=3.6, scale=0.42) assert_scalar_congruency(bijector, lower_x=-2., upper_x=2.) if __name__", "from one batches. 
self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.], run(bijector.inverse, x)) self.assertAllClose([0.],", "= AffineScalar(scale=multiplier) x = np.float64([1.]) # One sample from one", "run in (static_run, dynamic_run): mu = [1., -1] # Univariate,", "tensorflow.python.ops.distributions.bijector_test_util import assert_scalar_congruency from tensorflow.python.platform import test class AffineScalarBijectorTest(test.TestCase): \"\"\"Tests", "bijector = AffineScalar(shift=mu, scale=2.) x = [1., 2, 3] #", "x): return fun(x).eval() def dynamic_run(fun, x_value): x_value = np.array(x_value) x", "__future__ import absolute_import from __future__ import division from __future__ import", "implied. # See the License for the specific language governing", "2.], run(bijector.inverse, x)) self.assertAllClose([-np.log(2.)] * 3, run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesShiftOnly(self):", "each of two batches. self.assertAllClose([3., 0], run(bijector.forward, x)) self.assertAllClose([0., 2],", "under the Apache License, Version 2.0 (the \"License\"); # you", "0.], run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaScale(self): with self.test_session() as sess: def", "by applicable law or agreed to in writing, software #", "to scale = 2, shift = 0. bijector = AffineScalar(scale=multiplier)", "scale=2.) x = [1., 2, 3] # Three scalar samples", "# One batch, scalar. # Corresponds to scale = 1.", "AffineScalarBijectorTest(test.TestCase): \"\"\"Tests correctness of the Y = scale @ x", "sample from each of two batches. self.assertAllClose([2., 0], run(bijector.forward, x))", "array_ops.placeholder(dtypes.float64, name=\"x\") return sess.run(fun(x), feed_dict={x: x_value}) for run in (static_run,", "AffineScalar(shift=3.6, scale=0.42) assert_scalar_congruency(bijector, lower_x=-2., upper_x=2.) if __name__ == \"__main__\": test.main()", "1] # One sample from each of two batches. 
self.assertAllClose([3.,", "batches. # Corresponds to scale = 1. bijector = AffineScalar(shift=mu,", "of two batches. self.assertAllClose([2., 0], run(bijector.forward, x)) self.assertAllClose([0., 2], run(bijector.inverse,", "as np from tensorflow.contrib.distributions.python.ops.bijectors.affine_scalar import AffineScalar from tensorflow.python.framework import dtypes", "* 3, run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesShiftOnly(self): with self.test_session() as sess:", "x)) self.assertAllClose([0.], run(bijector.inverse, x)) self.assertAllClose([0.], run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesScaleOnly(self): with", "the Y = scale @ x + shift transformation.\"\"\" def", "scalar samples (no batches). self.assertAllClose([1., 3, 5], run(bijector.forward, x)) self.assertAllClose([1.,", "x_value}) for run in (static_run, dynamic_run): multiplier = np.float64([2.]) #", "scale = 2, shift = 0. bijector = AffineScalar(scale=multiplier) x", "x)) self.assertAllClose([-np.log(2.)] * 3, run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesShiftOnly(self): with self.test_session()", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "Unless required by applicable law or agreed to in writing,", "self.assertAllClose([0.5], run(bijector.inverse, x)) self.assertAllClose([np.log(0.5)], run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaIdentity(self): with self.test_session()", "from each of two batches. self.assertAllClose([2., 0], run(bijector.forward, x)) self.assertAllClose([0.,", "x)) self.assertAllClose( [-np.log(2), 0.], run(bijector.inverse_log_det_jacobian, x)) def testScalarCongruency(self): with self.test_session():", "the specific language governing permissions and # limitations under the", "sample from one batches. 
self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.], run(bijector.inverse, x))", "# limitations under the License. # ============================================================================== \"\"\"Affine Scalar Tests.\"\"\"", "return fun(x).eval() def dynamic_run(fun, x_value): x_value = np.array(x_value).astype(np.float64) x =", "AffineScalar(shift=mu) self.assertEqual(\"affine_scalar\", bijector.name) def testNoBatchScalar(self): with self.test_session() as sess: def", "x): return fun(x).eval() def dynamic_run(fun, x_value): x_value = np.array(x_value).astype(np.float64) x", "applicable law or agreed to in writing, software # distributed", "dtypes from tensorflow.python.ops import array_ops from tensorflow.python.ops.distributions.bijector_test_util import assert_scalar_congruency from", "one batches. self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.5], run(bijector.inverse, x)) self.assertAllClose([np.log(0.5)], run(bijector.inverse_log_det_jacobian,", "np.float64([1.]) # One batch, scalar. # Corresponds to scale =", "in writing, software # distributed under the License is distributed", "(static_run, dynamic_run): multiplier = np.float64([2.]) # One batch, scalar. #", "= scale @ x + shift transformation.\"\"\" def testProperties(self): with", "# Univariate, two batches. # Corresponds to scale = 1.", "AffineScalar(shift=mu) x = np.float64([1.]) # One sample from one batches.", "fun(x).eval() def dynamic_run(fun, x_value): x_value = np.array(x_value).astype(np.float64) x = array_ops.placeholder(dtypes.float64,", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "np.array(x_value) x = array_ops.placeholder(dtypes.float32, name=\"x\") return sess.run(fun(x), feed_dict={x: x_value}) for", "self.assertEqual(\"affine_scalar\", bijector.name) def testNoBatchScalar(self): with self.test_session() as sess: def static_run(fun,", "License, Version 2.0 (the \"License\"); # you may not use", "batches. 
# Corresponds to scale = 1. bijector = AffineScalar(shift=mu)", "# You may obtain a copy of the License at", "to scale = 1. bijector = AffineScalar(shift=mu) x = [1.,", "feed_dict={x: x_value}) for run in (static_run, dynamic_run): mu = -1.", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "run(bijector.inverse_log_det_jacobian, x)) def testScalarCongruency(self): with self.test_session(): bijector = AffineScalar(shift=3.6, scale=0.42)", "Authors. All Rights Reserved. # # Licensed under the Apache", "run(bijector.inverse, x)) self.assertAllClose([np.log(0.5)], run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaIdentity(self): with self.test_session() as", "from one batches. self.assertAllClose([2.], run(bijector.forward, x)) self.assertAllClose([0.5], run(bijector.inverse, x)) self.assertAllClose([np.log(0.5)],", "def testNoBatchScalar(self): with self.test_session() as sess: def static_run(fun, x): return", "class AffineScalarBijectorTest(test.TestCase): \"\"\"Tests correctness of the Y = scale @", "assert_scalar_congruency from tensorflow.python.platform import test class AffineScalarBijectorTest(test.TestCase): \"\"\"Tests correctness of", "to scale = 1. bijector = AffineScalar(shift=mu, scale=[2., 1]) x", "the License for the specific language governing permissions and #", "Apache License, Version 2.0 (the \"License\"); # you may not", "run(bijector.inverse_log_det_jacobian, x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesScaleOnly(self): with self.test_session() as sess: def static_run(fun,", "run(bijector.forward, x)) self.assertAllClose([1., 1.5, 2.], run(bijector.inverse, x)) self.assertAllClose([-np.log(2.)] * 3,", "from tensorflow.python.ops import array_ops from tensorflow.python.ops.distributions.bijector_test_util import assert_scalar_congruency from tensorflow.python.platform", "either express or implied. # See the License for the", "scalar. 
# Corresponds to scale = 2, shift = 0.", "x_value = np.array(x_value) x = array_ops.placeholder(dtypes.float32, name=\"x\") return sess.run(fun(x), feed_dict={x:", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "numpy as np from tensorflow.contrib.distributions.python.ops.bijectors.affine_scalar import AffineScalar from tensorflow.python.framework import", "= 1. bijector = AffineScalar(shift=mu) x = np.float64([1.]) # One", "run(bijector.inverse, x)) self.assertAllClose( [-np.log(2), 0.], run(bijector.inverse_log_det_jacobian, x)) def testScalarCongruency(self): with", "= np.array(x_value).astype(np.float64) x = array_ops.placeholder(dtypes.float64, name=\"x\") return sess.run(fun(x), feed_dict={x: x_value})", "testScalarCongruency(self): with self.test_session(): bijector = AffineScalar(shift=3.6, scale=0.42) assert_scalar_congruency(bijector, lower_x=-2., upper_x=2.)", "run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaIdentity(self): with self.test_session() as sess: def static_run(fun,", "= np.float64([2.]) # One batch, scalar. # Corresponds to scale", "the License. # ============================================================================== \"\"\"Affine Scalar Tests.\"\"\" from __future__ import", "Corresponds to scale = 1. bijector = AffineScalar(shift=mu) x =", "transformation.\"\"\" def testProperties(self): with self.test_session(): mu = -1. # scale", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "return sess.run(fun(x), feed_dict={x: x_value}) for run in (static_run, dynamic_run): mu", "# Copyright 2016 The TensorFlow Authors. All Rights Reserved. #", "scale = 2 bijector = AffineScalar(shift=mu, scale=2.) x = [1.,", "3, 5], run(bijector.forward, x)) self.assertAllClose([1., 1.5, 2.], run(bijector.inverse, x)) self.assertAllClose([-np.log(2.)]", "# Corresponds to scale = 2, shift = 0. bijector", "batch, scalar. # Corresponds to scale = 1. 
bijector =", "absolute_import from __future__ import division from __future__ import print_function import", "AffineScalar from tensorflow.python.framework import dtypes from tensorflow.python.ops import array_ops from", "# One batch, scalar. # Corresponds to scale = 2,", "testTwoBatchScalarIdentityViaIdentity(self): with self.test_session() as sess: def static_run(fun, x): return fun(x).eval()", "x_value = np.array(x_value).astype(np.float64) x = array_ops.placeholder(dtypes.float64, name=\"x\") return sess.run(fun(x), feed_dict={x:", "= [1., 1] # One sample from each of two", "\"License\"); # you may not use this file except in", "= 2 bijector = AffineScalar(shift=mu, scale=2.) x = [1., 2,", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "dynamic_run(fun, x_value): x_value = np.array(x_value).astype(np.float64) x = array_ops.placeholder(dtypes.float64, name=\"x\") return", "= array_ops.placeholder(dtypes.float64, name=\"x\") return sess.run(fun(x), feed_dict={x: x_value}) for run in", "(static_run, dynamic_run): mu = [1., -1] # Univariate, two batches.", "# distributed under the License is distributed on an \"AS", "= np.float64([1.]) # One sample from one batches. self.assertAllClose([2.], run(bijector.forward,", "# scale corresponds to 1. bijector = AffineScalar(shift=mu) self.assertEqual(\"affine_scalar\", bijector.name)", "# Unless required by applicable law or agreed to in", "__future__ import division from __future__ import print_function import numpy as", "sess: def static_run(fun, x): return fun(x).eval() def dynamic_run(fun, x_value): x_value", "dynamic_run): multiplier = np.float64([2.]) # One batch, scalar. # Corresponds", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "1]) x = [1., 1] # One sample from each", "(no batches). 
self.assertAllClose([1., 3, 5], run(bijector.forward, x)) self.assertAllClose([1., 1.5, 2.],", "You may obtain a copy of the License at #", "x)) def testOneBatchScalarViaIdentityIn64BitUserProvidesScaleOnly(self): with self.test_session() as sess: def static_run(fun, x):", "permissions and # limitations under the License. # ============================================================================== \"\"\"Affine", "# Corresponds to scale = 1. bijector = AffineScalar(shift=mu) x", "run(bijector.forward, x)) self.assertAllClose([0.5], run(bijector.inverse, x)) self.assertAllClose([np.log(0.5)], run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaIdentity(self):", "bijector = AffineScalar(scale=multiplier) x = np.float64([1.]) # One sample from", "the Apache License, Version 2.0 (the \"License\"); # you may", "One sample from each of two batches. self.assertAllClose([3., 0], run(bijector.forward,", "name=\"x\") return sess.run(fun(x), feed_dict={x: x_value}) for run in (static_run, dynamic_run):", "Y = scale @ x + shift transformation.\"\"\" def testProperties(self):", "self.assertAllClose([np.log(0.5)], run(bijector.inverse_log_det_jacobian, x)) def testTwoBatchScalarIdentityViaIdentity(self): with self.test_session() as sess: def", "= -1. # Corresponds to scale = 2 bijector =", "with self.test_session(): bijector = AffineScalar(shift=3.6, scale=0.42) assert_scalar_congruency(bijector, lower_x=-2., upper_x=2.) if" ]
[ "node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def status_node(data_dir,", "'algorand-releases', f\"channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_\", 'tar.gz', s3_auth=False )) package_versions = list(map(semver_util.parse_version, package_keys)) latest_version", "os_util.get_os_type() cpu_arch_type = os_util.get_cpu_arch_type() package_keys = list(s3_util.get_matching_s3_keys( 'algorand-releases', f\"channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_\", 'tar.gz',", "is None: log_file_name = f\"indexer-{time_util.get_timestamp()}.log\" algorand_indexer_command.extend(args) log_file = open(log_file_name, 'w')", "node['data'], '--postgres', build_indexer_postgress_connection_string(postgres) ]) if not pid_file is None: algorand_indexer_args.extend([", "algorand_indexer(algorand_indexer_args, bin_dir, log_file_name) def start_indexer_remote_node(node, postgres, bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args", "current_kmd_config) def start_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node', 'start',", "pid_file is None: algorand_indexer_args.extend([ '--pidfile', pid_file ]) algorand_indexer(algorand_indexer_args, bin_dir, log_file_name)", "= os_util.get_os_type() cpu_arch_type = os_util.get_cpu_arch_type() package_keys = list(s3_util.get_matching_s3_keys( 'algorand-releases', f\"channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_\",", "at {kmd_config_path} with:\\n{json.dumps(kmd_config, sort_keys=True, indent=4)}\") file_util.write_json_file(node_config_path, current_node_config) file_util.write_json_file(kmd_config_path, current_kmd_config) def", "channel == 'test': node_package_url = build_algo_release_url('node', 'stable', os_type, cpu_arch_type, node_package_version)", "algorand_indexer(algorand_indexer_args, bin_dir, log_file_name) def build_indexer_postgress_connection_string(postgres): 
postgress_connection_string = [] for field", "'start', ] print(f\"Starting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args,", "sort_keys=True, indent=4)}\") print(f\"Showing node configs at {kmd_config_path} with:\\n{json.dumps(current_kmd_config, sort_keys=True, indent=4)}\")", "kmd_dir, bin_dir=None): goal_args = [ 'node', 'restart', ] print(f\"Restarting node", "bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([ '--algod-net', f\"{node['host']}:{node['port']}\", '--algod-token',", "file_util.ensure_folder(data_dir) bin_dir = file_util.ensure_folder(bin_dir) os_type = os_util.get_os_type() cpu_arch_type = os_util.get_cpu_arch_type()", "algorand_indexer_args.extend([ '--pidfile', pid_file ]) algorand_indexer(algorand_indexer_args, bin_dir, log_file_name) def start_indexer_remote_node(node, postgres,", "'--algod-token', node['token'], '--genesis', node['genesis'], '--postgres', build_indexer_postgress_connection_string(postgres) ]) if not pid_file", "os_util.get_os_type() cpu_arch_type = os_util.get_cpu_arch_type() if node_package_version == 'latest': if channel", "['algorand-indexer'] if not bin_dir is None: algorand_indexer_command = [f\"{bin_dir}/algorand-indexer\"] if", "'{}') file_util.ensure_file(kmd_config_path, '{}') current_node_config = file_util.read_json_file(node_config_path) current_kmd_config = file_util.read_json_file(kmd_config_path) print(f\"Showing", "with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def restart_node(data_dir, kmd_dir,", "postgres, bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([ '-d', node['data'],", "if log_file_name is None: log_file_name = f\"indexer-{time_util.get_timestamp()}.log\" algorand_indexer_command.extend(args) log_file =", "start_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 
'node', 'start', ] print(f\"Starting", "None: algorand_indexer_args.extend([ '--pidfile', pid_file ]) algorand_indexer(algorand_indexer_args, bin_dir, log_file_name) def start_indexer_remote_node(node,", "goal_command.extend([ '-d', data_dir, '-k', kmd_dir, ]) goal_command.extend(args) subprocess.run(goal_command, check=True) def", "def build_indexer_postgress_connection_string(postgres): postgress_connection_string = [] for field in postgres.items(): postgress_connection_string.append(f\"{field[0]}={field[1]}\")", "cpu_arch_type, node_package_version) if channel == 'test': node_package_url = build_algo_release_url('node', 'stable',", "{data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def status_node(data_dir, kmd_dir, bin_dir=None):", "current_kmd_config = file_util.read_json_file(kmd_config_path) current_node_config.update(node_config) current_kmd_config.update(kmd_config) print(f\"Updating node configs at {node_config_path}", "of node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def", "install algod. 
\"\"\" node_package_dir = file_util.ensure_folder(f\"/tmp/algod-pkg-{time_util.get_timestamp()}\") data_dir = file_util.ensure_folder(data_dir) bin_dir", "_ = urllib.request.urlretrieve(node_package_url, node_package_tar_path) file_util.decompressTarfile(node_package_tar_path, f\"{node_package_dir}\") file_util.mv_folder_contents(f\"{node_package_dir}/data\", data_dir) file_util.mv_folder_contents(f\"{node_package_dir}/bin\", bin_dir)", "{kmd_config_path} with:\\n{json.dumps(kmd_config, sort_keys=True, indent=4)}\") file_util.write_json_file(node_config_path, current_node_config) file_util.write_json_file(kmd_config_path, current_kmd_config) def start_node(data_dir,", "[f\"{bin_dir}/goal\"] goal_command.extend([ '-d', data_dir, '-k', kmd_dir, ]) goal_command.extend(args) subprocess.run(goal_command, check=True)", "= os_util.get_cpu_arch_type() if node_package_version == 'latest': if channel == 'test':", "goal_args, bin_dir) def restart_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node',", "[ 'node', 'stop', ] print(f\"Stopping node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir,", ") else: file_util.copy_file( os.path.join(node_package_dir, f\"genesis/{channel}net/genesis.json\"), os.path.join(data_dir, 'genesis.json') ) def show_node_configs(data_dir,", "= file_util.read_json_file(kmd_config_path) current_node_config.update(node_config) current_kmd_config.update(kmd_config) print(f\"Updating node configs at {node_config_path} with:\\n{json.dumps(node_config,", "algorand_indexer_args.extend([ '--pidfile', pid_file ]) algorand_indexer(algorand_indexer_args, bin_dir, log_file_name) def build_indexer_postgress_connection_string(postgres): postgress_connection_string", "'stable', os_type, cpu_arch_type, node_package_version) node_package_tar_path = f\"{node_package_dir}/node_package.tar.gz\" _ = urllib.request.urlretrieve(node_package_url,", "sort_keys=True, indent=4)}\") file_util.write_json_file(node_config_path, 
current_node_config) file_util.write_json_file(kmd_config_path, current_kmd_config) def start_node(data_dir, kmd_dir, bin_dir=None):", "def stop_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node', 'stop', ]", "bin_dir=None): goal_args = [ 'node', 'stop', ] print(f\"Stopping node with:\\n\\tdata_dir:", "def start_indexer_local_node(node, postgres, bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([", "os_util.get_cpu_arch_type() package_keys = list(s3_util.get_matching_s3_keys( 'algorand-releases', f\"channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_\", 'tar.gz', s3_auth=False )) package_versions", "= file_util.ensure_folder(kmd_dir) node_config_path = f\"{data_dir}/config.json\" kmd_config_path = f\"{kmd_dir}/kmd_config.json\" file_util.ensure_file(node_config_path, '{}')", "bin_dir) def stop_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node', 'stop',", "with:\\n{json.dumps(current_kmd_config, sort_keys=True, indent=4)}\") def configure_node(data_dir, kmd_dir, node_config, kmd_config): data_dir =", "build_algo_release_url('node', channel, os_type, cpu_arch_type, node_package_version) if channel == 'test': node_package_url", "= f\"{kmd_dir}/kmd_config.json\" file_util.ensure_file(node_config_path, '{}') file_util.ensure_file(kmd_config_path, '{}') current_node_config = file_util.read_json_file(node_config_path) current_kmd_config", "os_type = os_util.get_os_type() cpu_arch_type = os_util.get_cpu_arch_type() if node_package_version == 'latest':", "bin_dir) def restart_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node', 'restart',", "os_type, cpu_arch_type): os_type = os_util.get_os_type() cpu_arch_type = os_util.get_cpu_arch_type() package_keys =", "= [f\"{bin_dir}/goal\"] goal_command.extend([ '-d', data_dir, '-k', kmd_dir, ]) goal_command.extend(args) subprocess.run(goal_command,", "'stable': file_util.copy_file( os.path.join(node_package_dir, 
\"genesis/mainnet/genesis.json\"), os.path.join(data_dir, 'genesis.json') ) else: file_util.copy_file( os.path.join(node_package_dir,", "def build_algo_release_url(package_type, channel, os_type, cpu_arch_type, package_version): return f\"https://algorand-releases.s3.amazonaws.com/channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_{package_version}.tar.gz\" def get_latest_package_version(package_type,", "get_latest_package_version(package_type, channel, os_type, cpu_arch_type): os_type = os_util.get_os_type() cpu_arch_type = os_util.get_cpu_arch_type()", "import urllib.request from mule.util import os_util from mule.util import file_util", "goal_args, bin_dir) def goal(data_dir, kmd_dir, args, bin_dir=None): goal_command = ['goal']", "stop_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node', 'stop', ] print(f\"Stopping", "== 'test': node_package_url = build_algo_release_url('node', 'stable', os_type, cpu_arch_type, node_package_version) node_package_tar_path", "node_package_dir = file_util.ensure_folder(f\"/tmp/algod-pkg-{time_util.get_timestamp()}\") data_dir = file_util.ensure_folder(data_dir) bin_dir = file_util.ensure_folder(bin_dir) os_type", "\"\"\" node_package_dir = file_util.ensure_folder(f\"/tmp/algod-pkg-{time_util.get_timestamp()}\") data_dir = file_util.ensure_folder(data_dir) bin_dir = file_util.ensure_folder(bin_dir)", "cpu_arch_type) else: node_package_version = get_latest_package_version('node', channel, os_type, cpu_arch_type) print(f\"Installing {channel}", "current_node_config = file_util.read_json_file(node_config_path) current_kmd_config = file_util.read_json_file(kmd_config_path) print(f\"Showing node configs at", "= [] for field in postgres.items(): postgress_connection_string.append(f\"{field[0]}={field[1]}\") return ' '.join(postgress_connection_string)", "not bin_dir is None: goal_command = [f\"{bin_dir}/goal\"] goal_command.extend([ '-d', data_dir,", "algorand_indexer_command = ['algorand-indexer'] if not 
bin_dir is None: algorand_indexer_command =", "{kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def restart_node(data_dir, kmd_dir, bin_dir=None): goal_args", "= [ 'node', 'stop', ] print(f\"Stopping node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\")", "get_latest_package_version('node', channel, os_type, cpu_arch_type) print(f\"Installing {channel} node package version {node_package_version}", "node['token'], '--genesis', node['genesis'], '--postgres', build_indexer_postgress_connection_string(postgres) ]) if not pid_file is", "node['genesis'], '--postgres', build_indexer_postgress_connection_string(postgres) ]) if not pid_file is None: algorand_indexer_args.extend([", "'latest': if channel == 'test': node_package_version = get_latest_package_version('node', 'stable', os_type,", "'genesis.json') ) def show_node_configs(data_dir, kmd_dir): data_dir = file_util.ensure_folder(data_dir) kmd_dir =", "build_indexer_postgress_connection_string(postgres): postgress_connection_string = [] for field in postgres.items(): postgress_connection_string.append(f\"{field[0]}={field[1]}\") return", "kmd_config): data_dir = file_util.ensure_folder(data_dir) kmd_dir = file_util.ensure_folder(kmd_dir) node_config_path = f\"{data_dir}/config.json\"", "algod. 
\"\"\" node_package_dir = file_util.ensure_folder(f\"/tmp/algod-pkg-{time_util.get_timestamp()}\") data_dir = file_util.ensure_folder(data_dir) bin_dir =", "sort_keys=True, indent=4)}\") def configure_node(data_dir, kmd_dir, node_config, kmd_config): data_dir = file_util.ensure_folder(data_dir)", "= ['daemon'] algorand_indexer_args.extend([ '-d', node['data'], '--postgres', build_indexer_postgress_connection_string(postgres) ]) if not", "current_kmd_config.update(kmd_config) print(f\"Updating node configs at {node_config_path} with:\\n{json.dumps(node_config, sort_keys=True, indent=4)}\") print(f\"Updating", "stdout=log_file, stderr=log_file) def start_indexer_local_node(node, postgres, bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args =", "of package type {package_type} for channel {channel}: {latest_version}\") return latest_version", "build_algo_release_url(package_type, channel, os_type, cpu_arch_type, package_version): return f\"https://algorand-releases.s3.amazonaws.com/channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_{package_version}.tar.gz\" def get_latest_package_version(package_type, channel,", "= f\"{data_dir}/config.json\" kmd_config_path = f\"{kmd_dir}/kmd_config.json\" file_util.ensure_file(node_config_path, '{}') file_util.ensure_file(kmd_config_path, '{}') current_node_config", "'node', 'stop', ] print(f\"Stopping node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir,", "f\"https://algorand-releases.s3.amazonaws.com/channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_{package_version}.tar.gz\" def get_latest_package_version(package_type, channel, os_type, cpu_arch_type): os_type = os_util.get_os_type() cpu_arch_type", "]) if not pid_file is None: algorand_indexer_args.extend([ '--pidfile', pid_file ])", "'tar.gz', s3_auth=False )) package_versions = list(map(semver_util.parse_version, package_keys)) latest_version = 
semver_util.get_highest_version(package_versions)", "import subprocess import json import urllib.request from mule.util import os_util", "indent=4)}\") print(f\"Showing node configs at {kmd_config_path} with:\\n{json.dumps(current_kmd_config, sort_keys=True, indent=4)}\") def", "'-d', node['data'], '--postgres', build_indexer_postgress_connection_string(postgres) ]) if not pid_file is None:", "with:\\n{json.dumps(node_config, sort_keys=True, indent=4)}\") print(f\"Updating node configs at {kmd_config_path} with:\\n{json.dumps(kmd_config, sort_keys=True,", "channel {channel}: {latest_version}\") return latest_version def install_node(data_dir, bin_dir, channel, node_package_version='latest'):", "package_versions = list(map(semver_util.parse_version, package_keys)) latest_version = semver_util.get_highest_version(package_versions) print(f\"Found latest version", "latest_version = semver_util.get_highest_version(package_versions) print(f\"Found latest version of package type {package_type}", "node_package_version) if channel == 'test': node_package_url = build_algo_release_url('node', 'stable', os_type,", "def algorand_indexer(args, bin_dir=None, log_file_name=None): algorand_indexer_command = ['algorand-indexer'] if not bin_dir", "goal_args = [ 'node', 'restart', ] print(f\"Restarting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir:", "package version {node_package_version} to:\\n\\tbin_dir: {bin_dir}\\n\\tdata_dir: {data_dir}\") node_package_url = build_algo_release_url('node', channel,", "node_package_version='latest'): \"\"\" Download and install algod. 
\"\"\" node_package_dir = file_util.ensure_folder(f\"/tmp/algod-pkg-{time_util.get_timestamp()}\")", "configure_node(data_dir, kmd_dir, node_config, kmd_config): data_dir = file_util.ensure_folder(data_dir) kmd_dir = file_util.ensure_folder(kmd_dir)", "[ 'node', 'status', ] print(f\"Status of node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\")", "is None: algorand_indexer_args.extend([ '--pidfile', pid_file ]) algorand_indexer(algorand_indexer_args, bin_dir, log_file_name) def", "file_util.ensure_folder(bin_dir) os_type = os_util.get_os_type() cpu_arch_type = os_util.get_cpu_arch_type() if node_package_version ==", "is None: algorand_indexer_command = [f\"{bin_dir}/algorand-indexer\"] if log_file_name is None: log_file_name", "algorand_indexer_args.extend([ '-d', node['data'], '--postgres', build_indexer_postgress_connection_string(postgres) ]) if not pid_file is", "channel, os_type, cpu_arch_type) print(f\"Installing {channel} node package version {node_package_version} to:\\n\\tbin_dir:", "channel, node_package_version='latest'): \"\"\" Download and install algod. 
\"\"\" node_package_dir =", "list(s3_util.get_matching_s3_keys( 'algorand-releases', f\"channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_\", 'tar.gz', s3_auth=False )) package_versions = list(map(semver_util.parse_version, package_keys))", "if not bin_dir is None: goal_command = [f\"{bin_dir}/goal\"] goal_command.extend([ '-d',", "goal(data_dir, kmd_dir, args, bin_dir=None): goal_command = ['goal'] if not bin_dir", "node_config, kmd_config): data_dir = file_util.ensure_folder(data_dir) kmd_dir = file_util.ensure_folder(kmd_dir) node_config_path =", "'restart', ] print(f\"Restarting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args,", "package type {package_type} for channel {channel}: {latest_version}\") return latest_version def", "print(f\"Updating node configs at {node_config_path} with:\\n{json.dumps(node_config, sort_keys=True, indent=4)}\") print(f\"Updating node", "= [ 'node', 'status', ] print(f\"Status of node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir:", "log_file_name = f\"indexer-{time_util.get_timestamp()}.log\" algorand_indexer_command.extend(args) log_file = open(log_file_name, 'w') subprocess.Popen(algorand_indexer_command, stdout=log_file,", "sort_keys=True, indent=4)}\") print(f\"Updating node configs at {kmd_config_path} with:\\n{json.dumps(kmd_config, sort_keys=True, indent=4)}\")", "f\"genesis/{channel}net/genesis.json\"), os.path.join(data_dir, 'genesis.json') ) def show_node_configs(data_dir, kmd_dir): data_dir = file_util.ensure_folder(data_dir)", "['goal'] if not bin_dir is None: goal_command = [f\"{bin_dir}/goal\"] goal_command.extend([", "file_util.ensure_file(kmd_config_path, '{}') current_node_config = file_util.read_json_file(node_config_path) current_kmd_config = file_util.read_json_file(kmd_config_path) current_node_config.update(node_config) current_kmd_config.update(kmd_config)", "channel, os_type, cpu_arch_type): os_type = os_util.get_os_type() cpu_arch_type = 
os_util.get_cpu_arch_type() package_keys", "log_file_name) def start_indexer_remote_node(node, postgres, bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args = ['daemon']", "'--algod-net', f\"{node['host']}:{node['port']}\", '--algod-token', node['token'], '--genesis', node['genesis'], '--postgres', build_indexer_postgress_connection_string(postgres) ]) if", "if channel == 'test': node_package_version = get_latest_package_version('node', 'stable', os_type, cpu_arch_type)", "= build_algo_release_url('node', channel, os_type, cpu_arch_type, node_package_version) if channel == 'test':", "os_type, cpu_arch_type) else: node_package_version = get_latest_package_version('node', channel, os_type, cpu_arch_type) print(f\"Installing", "f\"channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_\", 'tar.gz', s3_auth=False )) package_versions = list(map(semver_util.parse_version, package_keys)) latest_version =", "f\"{node_package_dir}\") file_util.mv_folder_contents(f\"{node_package_dir}/data\", data_dir) file_util.mv_folder_contents(f\"{node_package_dir}/bin\", bin_dir) if channel == 'stable': file_util.copy_file(", "file_util.read_json_file(node_config_path) current_kmd_config = file_util.read_json_file(kmd_config_path) current_node_config.update(node_config) current_kmd_config.update(kmd_config) print(f\"Updating node configs at", "at {node_config_path} with:\\n{json.dumps(current_node_config, sort_keys=True, indent=4)}\") print(f\"Showing node configs at {kmd_config_path}", "file_util.ensure_folder(kmd_dir) node_config_path = f\"{data_dir}/config.json\" kmd_config_path = f\"{kmd_dir}/kmd_config.json\" file_util.ensure_file(node_config_path, '{}') file_util.ensure_file(kmd_config_path,", "node_package_version == 'latest': if channel == 'test': node_package_version = get_latest_package_version('node',", "file_util.copy_file( os.path.join(node_package_dir, \"genesis/mainnet/genesis.json\"), os.path.join(data_dir, 'genesis.json') ) else: 
file_util.copy_file( os.path.join(node_package_dir, f\"genesis/{channel}net/genesis.json\"),", "status_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node', 'status', ] print(f\"Status", "indent=4)}\") file_util.write_json_file(node_config_path, current_node_config) file_util.write_json_file(kmd_config_path, current_kmd_config) def start_node(data_dir, kmd_dir, bin_dir=None): goal_args", "mule.util import s3_util from mule.util import semver_util import platform def", "is None: goal_command = [f\"{bin_dir}/goal\"] goal_command.extend([ '-d', data_dir, '-k', kmd_dir,", "<reponame>bricerisingalgorand/mule import os import subprocess import json import urllib.request from", "bin_dir=None): goal_args = [ 'node', 'start', ] print(f\"Starting node with:\\n\\tdata_dir:", "= ['algorand-indexer'] if not bin_dir is None: algorand_indexer_command = [f\"{bin_dir}/algorand-indexer\"]", "== 'test': node_package_version = get_latest_package_version('node', 'stable', os_type, cpu_arch_type) else: node_package_version", "= file_util.ensure_folder(bin_dir) os_type = os_util.get_os_type() cpu_arch_type = os_util.get_cpu_arch_type() if node_package_version", "bin_dir=None, log_file_name=None): algorand_indexer_command = ['algorand-indexer'] if not bin_dir is None:", "file_util.write_json_file(kmd_config_path, current_kmd_config) def start_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node',", "goal_args, bin_dir) def stop_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node',", "os_type, cpu_arch_type, node_package_version) if channel == 'test': node_package_url = build_algo_release_url('node',", "f\"{data_dir}/config.json\" kmd_config_path = f\"{kmd_dir}/kmd_config.json\" file_util.ensure_file(node_config_path, '{}') file_util.ensure_file(kmd_config_path, '{}') current_node_config =", "goal(data_dir, kmd_dir, goal_args, bin_dir) def stop_node(data_dir, kmd_dir, bin_dir=None): goal_args =", "= semver_util.get_highest_version(package_versions) print(f\"Found latest 
version of package type {package_type} for", "cpu_arch_type, node_package_version) node_package_tar_path = f\"{node_package_dir}/node_package.tar.gz\" _ = urllib.request.urlretrieve(node_package_url, node_package_tar_path) file_util.decompressTarfile(node_package_tar_path,", "import semver_util import platform def build_algo_release_url(package_type, channel, os_type, cpu_arch_type, package_version):", "'{}') current_node_config = file_util.read_json_file(node_config_path) current_kmd_config = file_util.read_json_file(kmd_config_path) print(f\"Showing node configs", "'status', ] print(f\"Status of node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir,", "goal_command = [f\"{bin_dir}/goal\"] goal_command.extend([ '-d', data_dir, '-k', kmd_dir, ]) goal_command.extend(args)", "print(f\"Found latest version of package type {package_type} for channel {channel}:", "node configs at {kmd_config_path} with:\\n{json.dumps(kmd_config, sort_keys=True, indent=4)}\") file_util.write_json_file(node_config_path, current_node_config) file_util.write_json_file(kmd_config_path,", "cpu_arch_type, package_version): return f\"https://algorand-releases.s3.amazonaws.com/channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_{package_version}.tar.gz\" def get_latest_package_version(package_type, channel, os_type, cpu_arch_type): os_type", "{kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def status_node(data_dir, kmd_dir, bin_dir=None): goal_args", "with:\\n{json.dumps(current_node_config, sort_keys=True, indent=4)}\") print(f\"Showing node configs at {kmd_config_path} with:\\n{json.dumps(current_kmd_config, sort_keys=True,", "{channel} node package version {node_package_version} to:\\n\\tbin_dir: {bin_dir}\\n\\tdata_dir: {data_dir}\") node_package_url =", "= file_util.ensure_folder(data_dir) bin_dir = file_util.ensure_folder(bin_dir) os_type = os_util.get_os_type() cpu_arch_type =", "get_latest_package_version('node', 'stable', 
os_type, cpu_arch_type) else: node_package_version = get_latest_package_version('node', channel, os_type,", "] print(f\"Status of node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args,", "cpu_arch_type): os_type = os_util.get_os_type() cpu_arch_type = os_util.get_cpu_arch_type() package_keys = list(s3_util.get_matching_s3_keys(", "channel, os_type, cpu_arch_type, node_package_version) if channel == 'test': node_package_url =", "configs at {kmd_config_path} with:\\n{json.dumps(current_kmd_config, sort_keys=True, indent=4)}\") def configure_node(data_dir, kmd_dir, node_config,", "def start_indexer_remote_node(node, postgres, bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([", "print(f\"Installing {channel} node package version {node_package_version} to:\\n\\tbin_dir: {bin_dir}\\n\\tdata_dir: {data_dir}\") node_package_url", "bin_dir) def status_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node', 'status',", "bin_dir) def goal(data_dir, kmd_dir, args, bin_dir=None): goal_command = ['goal'] if", "kmd_dir, goal_args, bin_dir) def goal(data_dir, kmd_dir, args, bin_dir=None): goal_command =", "= file_util.ensure_folder(f\"/tmp/algod-pkg-{time_util.get_timestamp()}\") data_dir = file_util.ensure_folder(data_dir) bin_dir = file_util.ensure_folder(bin_dir) os_type =", "== 'stable': file_util.copy_file( os.path.join(node_package_dir, \"genesis/mainnet/genesis.json\"), os.path.join(data_dir, 'genesis.json') ) else: file_util.copy_file(", "= file_util.read_json_file(node_config_path) current_kmd_config = file_util.read_json_file(kmd_config_path) current_node_config.update(node_config) current_kmd_config.update(kmd_config) print(f\"Updating node configs", "with:\\n{json.dumps(kmd_config, sort_keys=True, indent=4)}\") file_util.write_json_file(node_config_path, current_node_config) file_util.write_json_file(kmd_config_path, current_kmd_config) def 
start_node(data_dir, kmd_dir,", "install_node(data_dir, bin_dir, channel, node_package_version='latest'): \"\"\" Download and install algod. \"\"\"", "'test': node_package_version = get_latest_package_version('node', 'stable', os_type, cpu_arch_type) else: node_package_version =", "print(f\"Showing node configs at {kmd_config_path} with:\\n{json.dumps(current_kmd_config, sort_keys=True, indent=4)}\") def configure_node(data_dir,", "return latest_version def install_node(data_dir, bin_dir, channel, node_package_version='latest'): \"\"\" Download and", "os.path.join(data_dir, 'genesis.json') ) def show_node_configs(data_dir, kmd_dir): data_dir = file_util.ensure_folder(data_dir) kmd_dir", "kmd_dir, goal_args, bin_dir) def stop_node(data_dir, kmd_dir, bin_dir=None): goal_args = [", "None: log_file_name = f\"indexer-{time_util.get_timestamp()}.log\" algorand_indexer_command.extend(args) log_file = open(log_file_name, 'w') subprocess.Popen(algorand_indexer_command,", "os_util.get_cpu_arch_type() if node_package_version == 'latest': if channel == 'test': node_package_version", "file_util.decompressTarfile(node_package_tar_path, f\"{node_package_dir}\") file_util.mv_folder_contents(f\"{node_package_dir}/data\", data_dir) file_util.mv_folder_contents(f\"{node_package_dir}/bin\", bin_dir) if channel == 'stable':", "= os_util.get_cpu_arch_type() package_keys = list(s3_util.get_matching_s3_keys( 'algorand-releases', f\"channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_\", 'tar.gz', s3_auth=False ))", "file_util.ensure_file(kmd_config_path, '{}') current_node_config = file_util.read_json_file(node_config_path) current_kmd_config = file_util.read_json_file(kmd_config_path) print(f\"Showing node", "mule.util import time_util from mule.util import s3_util from mule.util import", "channel, os_type, cpu_arch_type, package_version): return 
f\"https://algorand-releases.s3.amazonaws.com/channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_{package_version}.tar.gz\" def get_latest_package_version(package_type, channel, os_type,", "{node_config_path} with:\\n{json.dumps(current_node_config, sort_keys=True, indent=4)}\") print(f\"Showing node configs at {kmd_config_path} with:\\n{json.dumps(current_kmd_config,", "= os_util.get_os_type() cpu_arch_type = os_util.get_cpu_arch_type() if node_package_version == 'latest': if", "if channel == 'stable': file_util.copy_file( os.path.join(node_package_dir, \"genesis/mainnet/genesis.json\"), os.path.join(data_dir, 'genesis.json') )", "build_indexer_postgress_connection_string(postgres) ]) if not pid_file is None: algorand_indexer_args.extend([ '--pidfile', pid_file", "'node', 'status', ] print(f\"Status of node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir,", "{channel}: {latest_version}\") return latest_version def install_node(data_dir, bin_dir, channel, node_package_version='latest'): \"\"\"", "]) algorand_indexer(algorand_indexer_args, bin_dir, log_file_name) def build_indexer_postgress_connection_string(postgres): postgress_connection_string = [] for", "else: file_util.copy_file( os.path.join(node_package_dir, f\"genesis/{channel}net/genesis.json\"), os.path.join(data_dir, 'genesis.json') ) def show_node_configs(data_dir, kmd_dir):", "kmd_dir, args, bin_dir=None): goal_command = ['goal'] if not bin_dir is", "def get_latest_package_version(package_type, channel, os_type, cpu_arch_type): os_type = os_util.get_os_type() cpu_arch_type =", "bin_dir=None): goal_args = [ 'node', 'status', ] print(f\"Status of node", "bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([ '-d', node['data'], '--postgres',", "] print(f\"Stopping node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir)", "{data_dir}\") node_package_url 
= build_algo_release_url('node', channel, os_type, cpu_arch_type, node_package_version) if channel", "to:\\n\\tbin_dir: {bin_dir}\\n\\tdata_dir: {data_dir}\") node_package_url = build_algo_release_url('node', channel, os_type, cpu_arch_type, node_package_version)", "f\"indexer-{time_util.get_timestamp()}.log\" algorand_indexer_command.extend(args) log_file = open(log_file_name, 'w') subprocess.Popen(algorand_indexer_command, stdout=log_file, stderr=log_file) def", "platform def build_algo_release_url(package_type, channel, os_type, cpu_arch_type, package_version): return f\"https://algorand-releases.s3.amazonaws.com/channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_{package_version}.tar.gz\" def", "import file_util from mule.util import time_util from mule.util import s3_util", "node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def goal(data_dir,", "'node', 'restart', ] print(f\"Restarting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir,", "node_package_url = build_algo_release_url('node', 'stable', os_type, cpu_arch_type, node_package_version) node_package_tar_path = f\"{node_package_dir}/node_package.tar.gz\"", "print(f\"Restarting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def", "os.path.join(node_package_dir, f\"genesis/{channel}net/genesis.json\"), os.path.join(data_dir, 'genesis.json') ) def show_node_configs(data_dir, kmd_dir): data_dir =", "kmd_dir = file_util.ensure_folder(kmd_dir) node_config_path = f\"{data_dir}/config.json\" kmd_config_path = f\"{kmd_dir}/kmd_config.json\" file_util.ensure_file(node_config_path,", "list(map(semver_util.parse_version, package_keys)) latest_version = semver_util.get_highest_version(package_versions) print(f\"Found latest version of package", "configs at {node_config_path} with:\\n{json.dumps(node_config, sort_keys=True, indent=4)}\") 
print(f\"Updating node configs at", "kmd_dir, goal_args, bin_dir) def status_node(data_dir, kmd_dir, bin_dir=None): goal_args = [", "semver_util.get_highest_version(package_versions) print(f\"Found latest version of package type {package_type} for channel", "os.path.join(node_package_dir, \"genesis/mainnet/genesis.json\"), os.path.join(data_dir, 'genesis.json') ) else: file_util.copy_file( os.path.join(node_package_dir, f\"genesis/{channel}net/genesis.json\"), os.path.join(data_dir,", "time_util from mule.util import s3_util from mule.util import semver_util import", "{node_config_path} with:\\n{json.dumps(node_config, sort_keys=True, indent=4)}\") print(f\"Updating node configs at {kmd_config_path} with:\\n{json.dumps(kmd_config,", "mule.util import file_util from mule.util import time_util from mule.util import", "file_util.mv_folder_contents(f\"{node_package_dir}/bin\", bin_dir) if channel == 'stable': file_util.copy_file( os.path.join(node_package_dir, \"genesis/mainnet/genesis.json\"), os.path.join(data_dir,", "bin_dir = file_util.ensure_folder(bin_dir) os_type = os_util.get_os_type() cpu_arch_type = os_util.get_cpu_arch_type() if", "cpu_arch_type) print(f\"Installing {channel} node package version {node_package_version} to:\\n\\tbin_dir: {bin_dir}\\n\\tdata_dir: {data_dir}\")", "file_util.mv_folder_contents(f\"{node_package_dir}/data\", data_dir) file_util.mv_folder_contents(f\"{node_package_dir}/bin\", bin_dir) if channel == 'stable': file_util.copy_file( os.path.join(node_package_dir,", "print(f\"Status of node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir)", "goal_args = [ 'node', 'start', ] print(f\"Starting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir:", "urllib.request.urlretrieve(node_package_url, node_package_tar_path) file_util.decompressTarfile(node_package_tar_path, f\"{node_package_dir}\") file_util.mv_folder_contents(f\"{node_package_dir}/data\", data_dir) 
file_util.mv_folder_contents(f\"{node_package_dir}/bin\", bin_dir) if channel", "import platform def build_algo_release_url(package_type, channel, os_type, cpu_arch_type, package_version): return f\"https://algorand-releases.s3.amazonaws.com/channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_{package_version}.tar.gz\"", "bin_dir=None): goal_command = ['goal'] if not bin_dir is None: goal_command", "from mule.util import semver_util import platform def build_algo_release_url(package_type, channel, os_type,", "= file_util.read_json_file(node_config_path) current_kmd_config = file_util.read_json_file(kmd_config_path) print(f\"Showing node configs at {node_config_path}", "kmd_dir, goal_args, bin_dir) def restart_node(data_dir, kmd_dir, bin_dir=None): goal_args = [", "os_type, cpu_arch_type) print(f\"Installing {channel} node package version {node_package_version} to:\\n\\tbin_dir: {bin_dir}\\n\\tdata_dir:", "goal_args, bin_dir) def status_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node',", "def configure_node(data_dir, kmd_dir, node_config, kmd_config): data_dir = file_util.ensure_folder(data_dir) kmd_dir =", "Download and install algod. 
\"\"\" node_package_dir = file_util.ensure_folder(f\"/tmp/algod-pkg-{time_util.get_timestamp()}\") data_dir =", "from mule.util import os_util from mule.util import file_util from mule.util", "if not pid_file is None: algorand_indexer_args.extend([ '--pidfile', pid_file ]) algorand_indexer(algorand_indexer_args,", "restart_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node', 'restart', ] print(f\"Restarting", "[ 'node', 'restart', ] print(f\"Restarting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir,", "log_file = open(log_file_name, 'w') subprocess.Popen(algorand_indexer_command, stdout=log_file, stderr=log_file) def start_indexer_local_node(node, postgres,", "stderr=log_file) def start_indexer_local_node(node, postgres, bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args = ['daemon']", "] print(f\"Restarting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir)", "print(f\"Stopping node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def", "node_package_tar_path) file_util.decompressTarfile(node_package_tar_path, f\"{node_package_dir}\") file_util.mv_folder_contents(f\"{node_package_dir}/data\", data_dir) file_util.mv_folder_contents(f\"{node_package_dir}/bin\", bin_dir) if channel ==", "start_indexer_remote_node(node, postgres, bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([ '--algod-net',", "['daemon'] algorand_indexer_args.extend([ '--algod-net', f\"{node['host']}:{node['port']}\", '--algod-token', node['token'], '--genesis', node['genesis'], '--postgres', build_indexer_postgress_connection_string(postgres)", "with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def stop_node(data_dir, kmd_dir,", "open(log_file_name, 'w') subprocess.Popen(algorand_indexer_command, stdout=log_file, 
stderr=log_file) def start_indexer_local_node(node, postgres, bin_dir=None, pid_file=None,", "node_package_url = build_algo_release_url('node', channel, os_type, cpu_arch_type, node_package_version) if channel ==", "def show_node_configs(data_dir, kmd_dir): data_dir = file_util.ensure_folder(data_dir) kmd_dir = file_util.ensure_folder(kmd_dir) node_config_path", "package_keys = list(s3_util.get_matching_s3_keys( 'algorand-releases', f\"channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_\", 'tar.gz', s3_auth=False )) package_versions =", "= [ 'node', 'start', ] print(f\"Starting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\")", "semver_util import platform def build_algo_release_url(package_type, channel, os_type, cpu_arch_type, package_version): return", "== 'latest': if channel == 'test': node_package_version = get_latest_package_version('node', 'stable',", "version {node_package_version} to:\\n\\tbin_dir: {bin_dir}\\n\\tdata_dir: {data_dir}\") node_package_url = build_algo_release_url('node', channel, os_type,", "pid_file=None, log_file_name=None): algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([ '--algod-net', f\"{node['host']}:{node['port']}\", '--algod-token', node['token'],", "file_util from mule.util import time_util from mule.util import s3_util from", "= urllib.request.urlretrieve(node_package_url, node_package_tar_path) file_util.decompressTarfile(node_package_tar_path, f\"{node_package_dir}\") file_util.mv_folder_contents(f\"{node_package_dir}/data\", data_dir) file_util.mv_folder_contents(f\"{node_package_dir}/bin\", bin_dir) if", "file_util.read_json_file(kmd_config_path) current_node_config.update(node_config) current_kmd_config.update(kmd_config) print(f\"Updating node configs at {node_config_path} with:\\n{json.dumps(node_config, sort_keys=True,", "= list(map(semver_util.parse_version, package_keys)) latest_version = semver_util.get_highest_version(package_versions) print(f\"Found latest version 
of", "log_file_name=None): algorand_indexer_command = ['algorand-indexer'] if not bin_dir is None: algorand_indexer_command", "check=True) def algorand_indexer(args, bin_dir=None, log_file_name=None): algorand_indexer_command = ['algorand-indexer'] if not", "'-k', kmd_dir, ]) goal_command.extend(args) subprocess.run(goal_command, check=True) def algorand_indexer(args, bin_dir=None, log_file_name=None):", "cpu_arch_type = os_util.get_cpu_arch_type() if node_package_version == 'latest': if channel ==", "os.path.join(data_dir, 'genesis.json') ) else: file_util.copy_file( os.path.join(node_package_dir, f\"genesis/{channel}net/genesis.json\"), os.path.join(data_dir, 'genesis.json') )", "kmd_config_path = f\"{kmd_dir}/kmd_config.json\" file_util.ensure_file(node_config_path, '{}') file_util.ensure_file(kmd_config_path, '{}') current_node_config = file_util.read_json_file(node_config_path)", "f\"{kmd_dir}/kmd_config.json\" file_util.ensure_file(node_config_path, '{}') file_util.ensure_file(kmd_config_path, '{}') current_node_config = file_util.read_json_file(node_config_path) current_kmd_config =", "show_node_configs(data_dir, kmd_dir): data_dir = file_util.ensure_folder(data_dir) kmd_dir = file_util.ensure_folder(kmd_dir) node_config_path =", "def goal(data_dir, kmd_dir, args, bin_dir=None): goal_command = ['goal'] if not", "'{}') file_util.ensure_file(kmd_config_path, '{}') current_node_config = file_util.read_json_file(node_config_path) current_kmd_config = file_util.read_json_file(kmd_config_path) current_node_config.update(node_config)", "bin_dir, log_file_name) def build_indexer_postgress_connection_string(postgres): postgress_connection_string = [] for field in", "data_dir = file_util.ensure_folder(data_dir) bin_dir = file_util.ensure_folder(bin_dir) os_type = os_util.get_os_type() cpu_arch_type", "= ['goal'] if not bin_dir is None: goal_command = [f\"{bin_dir}/goal\"]", "algorand_indexer_args.extend([ '--algod-net', f\"{node['host']}:{node['port']}\", 
'--algod-token', node['token'], '--genesis', node['genesis'], '--postgres', build_indexer_postgress_connection_string(postgres) ])", ") def show_node_configs(data_dir, kmd_dir): data_dir = file_util.ensure_folder(data_dir) kmd_dir = file_util.ensure_folder(kmd_dir)", "print(f\"Updating node configs at {kmd_config_path} with:\\n{json.dumps(kmd_config, sort_keys=True, indent=4)}\") file_util.write_json_file(node_config_path, current_node_config)", "cpu_arch_type = os_util.get_cpu_arch_type() package_keys = list(s3_util.get_matching_s3_keys( 'algorand-releases', f\"channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_\", 'tar.gz', s3_auth=False", "node configs at {kmd_config_path} with:\\n{json.dumps(current_kmd_config, sort_keys=True, indent=4)}\") def configure_node(data_dir, kmd_dir,", "file_util.read_json_file(node_config_path) current_kmd_config = file_util.read_json_file(kmd_config_path) print(f\"Showing node configs at {node_config_path} with:\\n{json.dumps(current_node_config,", "if node_package_version == 'latest': if channel == 'test': node_package_version =", "s3_auth=False )) package_versions = list(map(semver_util.parse_version, package_keys)) latest_version = semver_util.get_highest_version(package_versions) print(f\"Found", "{package_type} for channel {channel}: {latest_version}\") return latest_version def install_node(data_dir, bin_dir,", "{data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def stop_node(data_dir, kmd_dir, bin_dir=None):", "start_indexer_local_node(node, postgres, bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([ '-d',", "subprocess.Popen(algorand_indexer_command, stdout=log_file, stderr=log_file) def start_indexer_local_node(node, postgres, bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args", "'stop', ] print(f\"Stopping node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, 
kmd_dir, goal_args,", "file_util.copy_file( os.path.join(node_package_dir, f\"genesis/{channel}net/genesis.json\"), os.path.join(data_dir, 'genesis.json') ) def show_node_configs(data_dir, kmd_dir): data_dir", "if not bin_dir is None: algorand_indexer_command = [f\"{bin_dir}/algorand-indexer\"] if log_file_name", "return f\"https://algorand-releases.s3.amazonaws.com/channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_{package_version}.tar.gz\" def get_latest_package_version(package_type, channel, os_type, cpu_arch_type): os_type = os_util.get_os_type()", "algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([ '-d', node['data'], '--postgres', build_indexer_postgress_connection_string(postgres) ]) if", "f\"{node['host']}:{node['port']}\", '--algod-token', node['token'], '--genesis', node['genesis'], '--postgres', build_indexer_postgress_connection_string(postgres) ]) if not", "package_version): return f\"https://algorand-releases.s3.amazonaws.com/channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_{package_version}.tar.gz\" def get_latest_package_version(package_type, channel, os_type, cpu_arch_type): os_type =", "{kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def goal(data_dir, kmd_dir, args, bin_dir=None):", "bin_dir is None: goal_command = [f\"{bin_dir}/goal\"] goal_command.extend([ '-d', data_dir, '-k',", "os import subprocess import json import urllib.request from mule.util import", "{data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def restart_node(data_dir, kmd_dir, bin_dir=None):", "log_file_name) def build_indexer_postgress_connection_string(postgres): postgress_connection_string = [] for field in postgres.items():", "goal_args = [ 'node', 'stop', ] print(f\"Stopping node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir:", "os_type, cpu_arch_type, package_version): return 
f\"https://algorand-releases.s3.amazonaws.com/channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_{package_version}.tar.gz\" def get_latest_package_version(package_type, channel, os_type, cpu_arch_type):", "= f\"{node_package_dir}/node_package.tar.gz\" _ = urllib.request.urlretrieve(node_package_url, node_package_tar_path) file_util.decompressTarfile(node_package_tar_path, f\"{node_package_dir}\") file_util.mv_folder_contents(f\"{node_package_dir}/data\", data_dir)", "node_package_version = get_latest_package_version('node', channel, os_type, cpu_arch_type) print(f\"Installing {channel} node package", "not pid_file is None: algorand_indexer_args.extend([ '--pidfile', pid_file ]) algorand_indexer(algorand_indexer_args, bin_dir,", "algorand_indexer_command.extend(args) log_file = open(log_file_name, 'w') subprocess.Popen(algorand_indexer_command, stdout=log_file, stderr=log_file) def start_indexer_local_node(node,", "= build_algo_release_url('node', 'stable', os_type, cpu_arch_type, node_package_version) node_package_tar_path = f\"{node_package_dir}/node_package.tar.gz\" _", "node_config_path = f\"{data_dir}/config.json\" kmd_config_path = f\"{kmd_dir}/kmd_config.json\" file_util.ensure_file(node_config_path, '{}') file_util.ensure_file(kmd_config_path, '{}')", "data_dir = file_util.ensure_folder(data_dir) kmd_dir = file_util.ensure_folder(kmd_dir) node_config_path = f\"{data_dir}/config.json\" kmd_config_path", "file_util.read_json_file(kmd_config_path) print(f\"Showing node configs at {node_config_path} with:\\n{json.dumps(current_node_config, sort_keys=True, indent=4)}\") print(f\"Showing", "node_package_version = get_latest_package_version('node', 'stable', os_type, cpu_arch_type) else: node_package_version = get_latest_package_version('node',", "{latest_version}\") return latest_version def install_node(data_dir, bin_dir, channel, node_package_version='latest'): \"\"\" Download", "configs at {node_config_path} 
with:\\n{json.dumps(current_node_config, sort_keys=True, indent=4)}\") print(f\"Showing node configs at", "]) algorand_indexer(algorand_indexer_args, bin_dir, log_file_name) def start_indexer_remote_node(node, postgres, bin_dir=None, pid_file=None, log_file_name=None):", "print(f\"Starting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def", "pid_file ]) algorand_indexer(algorand_indexer_args, bin_dir, log_file_name) def build_indexer_postgress_connection_string(postgres): postgress_connection_string = []", "import os import subprocess import json import urllib.request from mule.util", "postgress_connection_string = [] for field in postgres.items(): postgress_connection_string.append(f\"{field[0]}={field[1]}\") return '", "node configs at {node_config_path} with:\\n{json.dumps(node_config, sort_keys=True, indent=4)}\") print(f\"Updating node configs", "{kmd_config_path} with:\\n{json.dumps(current_kmd_config, sort_keys=True, indent=4)}\") def configure_node(data_dir, kmd_dir, node_config, kmd_config): data_dir", "import s3_util from mule.util import semver_util import platform def build_algo_release_url(package_type,", "data_dir) file_util.mv_folder_contents(f\"{node_package_dir}/bin\", bin_dir) if channel == 'stable': file_util.copy_file( os.path.join(node_package_dir, \"genesis/mainnet/genesis.json\"),", "= get_latest_package_version('node', 'stable', os_type, cpu_arch_type) else: node_package_version = get_latest_package_version('node', channel,", "indent=4)}\") def configure_node(data_dir, kmd_dir, node_config, kmd_config): data_dir = file_util.ensure_folder(data_dir) kmd_dir", "None: algorand_indexer_command = [f\"{bin_dir}/algorand-indexer\"] if log_file_name is None: log_file_name =", "os_type, cpu_arch_type, node_package_version) node_package_tar_path = f\"{node_package_dir}/node_package.tar.gz\" _ = urllib.request.urlretrieve(node_package_url, node_package_tar_path)", "kmd_dir, node_config, 
kmd_config): data_dir = file_util.ensure_folder(data_dir) kmd_dir = file_util.ensure_folder(kmd_dir) node_config_path", "current_node_config) file_util.write_json_file(kmd_config_path, current_kmd_config) def start_node(data_dir, kmd_dir, bin_dir=None): goal_args = [", "file_util.write_json_file(node_config_path, current_node_config) file_util.write_json_file(kmd_config_path, current_kmd_config) def start_node(data_dir, kmd_dir, bin_dir=None): goal_args =", "configs at {kmd_config_path} with:\\n{json.dumps(kmd_config, sort_keys=True, indent=4)}\") file_util.write_json_file(node_config_path, current_node_config) file_util.write_json_file(kmd_config_path, current_kmd_config)", "indent=4)}\") print(f\"Updating node configs at {kmd_config_path} with:\\n{json.dumps(kmd_config, sort_keys=True, indent=4)}\") file_util.write_json_file(node_config_path,", "with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def goal(data_dir, kmd_dir,", "= f\"indexer-{time_util.get_timestamp()}.log\" algorand_indexer_command.extend(args) log_file = open(log_file_name, 'w') subprocess.Popen(algorand_indexer_command, stdout=log_file, stderr=log_file)", "= file_util.ensure_folder(data_dir) kmd_dir = file_util.ensure_folder(kmd_dir) node_config_path = f\"{data_dir}/config.json\" kmd_config_path =", "mule.util import os_util from mule.util import file_util from mule.util import", "log_file_name is None: log_file_name = f\"indexer-{time_util.get_timestamp()}.log\" algorand_indexer_command.extend(args) log_file = open(log_file_name,", ")) package_versions = list(map(semver_util.parse_version, package_keys)) latest_version = semver_util.get_highest_version(package_versions) print(f\"Found latest", "[f\"{bin_dir}/algorand-indexer\"] if log_file_name is None: log_file_name = f\"indexer-{time_util.get_timestamp()}.log\" algorand_indexer_command.extend(args) log_file", "from mule.util import file_util from mule.util import time_util from mule.util", 
"latest_version def install_node(data_dir, bin_dir, channel, node_package_version='latest'): \"\"\" Download and install", "args, bin_dir=None): goal_command = ['goal'] if not bin_dir is None:", "node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def stop_node(data_dir,", "= get_latest_package_version('node', channel, os_type, cpu_arch_type) print(f\"Installing {channel} node package version", "def start_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node', 'start', ]", "mule.util import semver_util import platform def build_algo_release_url(package_type, channel, os_type, cpu_arch_type,", "with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def status_node(data_dir, kmd_dir,", "'w') subprocess.Popen(algorand_indexer_command, stdout=log_file, stderr=log_file) def start_indexer_local_node(node, postgres, bin_dir=None, pid_file=None, log_file_name=None):", "{kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def stop_node(data_dir, kmd_dir, bin_dir=None): goal_args", "= ['daemon'] algorand_indexer_args.extend([ '--algod-net', f\"{node['host']}:{node['port']}\", '--algod-token', node['token'], '--genesis', node['genesis'], '--postgres',", "node package version {node_package_version} to:\\n\\tbin_dir: {bin_dir}\\n\\tdata_dir: {data_dir}\") node_package_url = build_algo_release_url('node',", "file_util.ensure_folder(f\"/tmp/algod-pkg-{time_util.get_timestamp()}\") data_dir = file_util.ensure_folder(data_dir) bin_dir = file_util.ensure_folder(bin_dir) os_type = os_util.get_os_type()", "]) goal_command.extend(args) subprocess.run(goal_command, check=True) def algorand_indexer(args, bin_dir=None, log_file_name=None): algorand_indexer_command =", "'--postgres', build_indexer_postgress_connection_string(postgres) ]) if not pid_file is None: algorand_indexer_args.extend([ '--pidfile',", "from mule.util import time_util from mule.util import s3_util from 
mule.util", "and install algod. \"\"\" node_package_dir = file_util.ensure_folder(f\"/tmp/algod-pkg-{time_util.get_timestamp()}\") data_dir = file_util.ensure_folder(data_dir)", "kmd_dir, bin_dir=None): goal_args = [ 'node', 'status', ] print(f\"Status of", "bin_dir is None: algorand_indexer_command = [f\"{bin_dir}/algorand-indexer\"] if log_file_name is None:", "type {package_type} for channel {channel}: {latest_version}\") return latest_version def install_node(data_dir,", "[ 'node', 'start', ] print(f\"Starting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir,", "'--pidfile', pid_file ]) algorand_indexer(algorand_indexer_args, bin_dir, log_file_name) def build_indexer_postgress_connection_string(postgres): postgress_connection_string =", "f\"{node_package_dir}/node_package.tar.gz\" _ = urllib.request.urlretrieve(node_package_url, node_package_tar_path) file_util.decompressTarfile(node_package_tar_path, f\"{node_package_dir}\") file_util.mv_folder_contents(f\"{node_package_dir}/data\", data_dir) file_util.mv_folder_contents(f\"{node_package_dir}/bin\",", "{node_package_version} to:\\n\\tbin_dir: {bin_dir}\\n\\tdata_dir: {data_dir}\") node_package_url = build_algo_release_url('node', channel, os_type, cpu_arch_type,", "not bin_dir is None: algorand_indexer_command = [f\"{bin_dir}/algorand-indexer\"] if log_file_name is", "else: node_package_version = get_latest_package_version('node', channel, os_type, cpu_arch_type) print(f\"Installing {channel} node", "node_package_version) node_package_tar_path = f\"{node_package_dir}/node_package.tar.gz\" _ = urllib.request.urlretrieve(node_package_url, node_package_tar_path) file_util.decompressTarfile(node_package_tar_path, f\"{node_package_dir}\")", "node_package_tar_path = f\"{node_package_dir}/node_package.tar.gz\" _ = urllib.request.urlretrieve(node_package_url, node_package_tar_path) file_util.decompressTarfile(node_package_tar_path, f\"{node_package_dir}\") 
file_util.mv_folder_contents(f\"{node_package_dir}/data\",", "kmd_dir): data_dir = file_util.ensure_folder(data_dir) kmd_dir = file_util.ensure_folder(kmd_dir) node_config_path = f\"{data_dir}/config.json\"", "version of package type {package_type} for channel {channel}: {latest_version}\") return", "file_util.ensure_folder(data_dir) kmd_dir = file_util.ensure_folder(kmd_dir) node_config_path = f\"{data_dir}/config.json\" kmd_config_path = f\"{kmd_dir}/kmd_config.json\"", "import os_util from mule.util import file_util from mule.util import time_util", "\"\"\" Download and install algod. \"\"\" node_package_dir = file_util.ensure_folder(f\"/tmp/algod-pkg-{time_util.get_timestamp()}\") data_dir", "def restart_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node', 'restart', ]", "log_file_name=None): algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([ '-d', node['data'], '--postgres', build_indexer_postgress_connection_string(postgres) ])", "bin_dir=None): goal_args = [ 'node', 'restart', ] print(f\"Restarting node with:\\n\\tdata_dir:", "channel == 'test': node_package_version = get_latest_package_version('node', 'stable', os_type, cpu_arch_type) else:", "for channel {channel}: {latest_version}\") return latest_version def install_node(data_dir, bin_dir, channel,", "build_algo_release_url('node', 'stable', os_type, cpu_arch_type, node_package_version) node_package_tar_path = f\"{node_package_dir}/node_package.tar.gz\" _ =", "bin_dir) if channel == 'stable': file_util.copy_file( os.path.join(node_package_dir, \"genesis/mainnet/genesis.json\"), os.path.join(data_dir, 'genesis.json')", "subprocess.run(goal_command, check=True) def algorand_indexer(args, bin_dir=None, log_file_name=None): algorand_indexer_command = ['algorand-indexer'] if", "current_kmd_config = file_util.read_json_file(kmd_config_path) print(f\"Showing node configs at {node_config_path} with:\\n{json.dumps(current_node_config, sort_keys=True,", "'-d', data_dir, '-k', kmd_dir, ]) 
goal_command.extend(args) subprocess.run(goal_command, check=True) def algorand_indexer(args,", "algorand_indexer_command = [f\"{bin_dir}/algorand-indexer\"] if log_file_name is None: log_file_name = f\"indexer-{time_util.get_timestamp()}.log\"", "= open(log_file_name, 'w') subprocess.Popen(algorand_indexer_command, stdout=log_file, stderr=log_file) def start_indexer_local_node(node, postgres, bin_dir=None,", "postgres, bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([ '--algod-net', f\"{node['host']}:{node['port']}\",", "goal(data_dir, kmd_dir, goal_args, bin_dir) def restart_node(data_dir, kmd_dir, bin_dir=None): goal_args =", "= [f\"{bin_dir}/algorand-indexer\"] if log_file_name is None: log_file_name = f\"indexer-{time_util.get_timestamp()}.log\" algorand_indexer_command.extend(args)", "pid_file ]) algorand_indexer(algorand_indexer_args, bin_dir, log_file_name) def start_indexer_remote_node(node, postgres, bin_dir=None, pid_file=None,", "latest version of package type {package_type} for channel {channel}: {latest_version}\")", "kmd_dir, ]) goal_command.extend(args) subprocess.run(goal_command, check=True) def algorand_indexer(args, bin_dir=None, log_file_name=None): algorand_indexer_command", "goal_command.extend(args) subprocess.run(goal_command, check=True) def algorand_indexer(args, bin_dir=None, log_file_name=None): algorand_indexer_command = ['algorand-indexer']", "def status_node(data_dir, kmd_dir, bin_dir=None): goal_args = [ 'node', 'status', ]", "print(f\"Showing node configs at {node_config_path} with:\\n{json.dumps(current_node_config, sort_keys=True, indent=4)}\") print(f\"Showing node", "import json import urllib.request from mule.util import os_util from mule.util", "urllib.request from mule.util import os_util from mule.util import file_util from", "goal(data_dir, kmd_dir, goal_args, bin_dir) def status_node(data_dir, kmd_dir, bin_dir=None): goal_args =", "goal(data_dir, kmd_dir, 
goal_args, bin_dir) def goal(data_dir, kmd_dir, args, bin_dir=None): goal_command", "['daemon'] algorand_indexer_args.extend([ '-d', node['data'], '--postgres', build_indexer_postgress_connection_string(postgres) ]) if not pid_file", "kmd_dir, bin_dir=None): goal_args = [ 'node', 'start', ] print(f\"Starting node", "'{}') current_node_config = file_util.read_json_file(node_config_path) current_kmd_config = file_util.read_json_file(kmd_config_path) current_node_config.update(node_config) current_kmd_config.update(kmd_config) print(f\"Updating", "= file_util.read_json_file(kmd_config_path) print(f\"Showing node configs at {node_config_path} with:\\n{json.dumps(current_node_config, sort_keys=True, indent=4)}\")", "'--genesis', node['genesis'], '--postgres', build_indexer_postgress_connection_string(postgres) ]) if not pid_file is None:", "import time_util from mule.util import s3_util from mule.util import semver_util", "'genesis.json') ) else: file_util.copy_file( os.path.join(node_package_dir, f\"genesis/{channel}net/genesis.json\"), os.path.join(data_dir, 'genesis.json') ) def", "pid_file=None, log_file_name=None): algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([ '-d', node['data'], '--postgres', build_indexer_postgress_connection_string(postgres)", "if channel == 'test': node_package_url = build_algo_release_url('node', 'stable', os_type, cpu_arch_type,", "algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([ '--algod-net', f\"{node['host']}:{node['port']}\", '--algod-token', node['token'], '--genesis', node['genesis'],", "log_file_name=None): algorand_indexer_args = ['daemon'] algorand_indexer_args.extend([ '--algod-net', f\"{node['host']}:{node['port']}\", '--algod-token', node['token'], '--genesis',", "'stable', os_type, cpu_arch_type) else: node_package_version = get_latest_package_version('node', channel, os_type, cpu_arch_type)", "] print(f\"Starting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, 
kmd_dir, goal_args, bin_dir)", "algorand_indexer(args, bin_dir=None, log_file_name=None): algorand_indexer_command = ['algorand-indexer'] if not bin_dir is", "current_node_config.update(node_config) current_kmd_config.update(kmd_config) print(f\"Updating node configs at {node_config_path} with:\\n{json.dumps(node_config, sort_keys=True, indent=4)}\")", "bin_dir, log_file_name) def start_indexer_remote_node(node, postgres, bin_dir=None, pid_file=None, log_file_name=None): algorand_indexer_args =", "os_type = os_util.get_os_type() cpu_arch_type = os_util.get_cpu_arch_type() package_keys = list(s3_util.get_matching_s3_keys( 'algorand-releases',", "at {kmd_config_path} with:\\n{json.dumps(current_kmd_config, sort_keys=True, indent=4)}\") def configure_node(data_dir, kmd_dir, node_config, kmd_config):", "= [ 'node', 'restart', ] print(f\"Restarting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\")", "json import urllib.request from mule.util import os_util from mule.util import", "def install_node(data_dir, bin_dir, channel, node_package_version='latest'): \"\"\" Download and install algod.", "current_node_config = file_util.read_json_file(node_config_path) current_kmd_config = file_util.read_json_file(kmd_config_path) current_node_config.update(node_config) current_kmd_config.update(kmd_config) print(f\"Updating node", "bin_dir, channel, node_package_version='latest'): \"\"\" Download and install algod. 
\"\"\" node_package_dir", "node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def restart_node(data_dir,", "from mule.util import s3_util from mule.util import semver_util import platform", "package_keys)) latest_version = semver_util.get_highest_version(package_versions) print(f\"Found latest version of package type", "channel == 'stable': file_util.copy_file( os.path.join(node_package_dir, \"genesis/mainnet/genesis.json\"), os.path.join(data_dir, 'genesis.json') ) else:", "at {node_config_path} with:\\n{json.dumps(node_config, sort_keys=True, indent=4)}\") print(f\"Updating node configs at {kmd_config_path}", "None: goal_command = [f\"{bin_dir}/goal\"] goal_command.extend([ '-d', data_dir, '-k', kmd_dir, ])", "goal_args = [ 'node', 'status', ] print(f\"Status of node with:\\n\\tdata_dir:", "file_util.ensure_file(node_config_path, '{}') file_util.ensure_file(kmd_config_path, '{}') current_node_config = file_util.read_json_file(node_config_path) current_kmd_config = file_util.read_json_file(kmd_config_path)", "'test': node_package_url = build_algo_release_url('node', 'stable', os_type, cpu_arch_type, node_package_version) node_package_tar_path =", "= list(s3_util.get_matching_s3_keys( 'algorand-releases', f\"channel/{channel}/{package_type}_{channel}_{os_type}-{cpu_arch_type}_\", 'tar.gz', s3_auth=False )) package_versions = list(map(semver_util.parse_version,", "\"genesis/mainnet/genesis.json\"), os.path.join(data_dir, 'genesis.json') ) else: file_util.copy_file( os.path.join(node_package_dir, f\"genesis/{channel}net/genesis.json\"), os.path.join(data_dir, 'genesis.json')", "kmd_dir, bin_dir=None): goal_args = [ 'node', 'stop', ] print(f\"Stopping node", "node configs at {node_config_path} with:\\n{json.dumps(current_node_config, sort_keys=True, indent=4)}\") print(f\"Showing node configs", "s3_util from mule.util import semver_util import platform def build_algo_release_url(package_type, channel,", 
"subprocess import json import urllib.request from mule.util import os_util from", "'--pidfile', pid_file ]) algorand_indexer(algorand_indexer_args, bin_dir, log_file_name) def start_indexer_remote_node(node, postgres, bin_dir=None,", "{bin_dir}\\n\\tdata_dir: {data_dir}\") node_package_url = build_algo_release_url('node', channel, os_type, cpu_arch_type, node_package_version) if", "goal_command = ['goal'] if not bin_dir is None: goal_command =", "data_dir, '-k', kmd_dir, ]) goal_command.extend(args) subprocess.run(goal_command, check=True) def algorand_indexer(args, bin_dir=None,", "None: algorand_indexer_args.extend([ '--pidfile', pid_file ]) algorand_indexer(algorand_indexer_args, bin_dir, log_file_name) def build_indexer_postgress_connection_string(postgres):", "os_util from mule.util import file_util from mule.util import time_util from", "{data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir, goal_args, bin_dir) def goal(data_dir, kmd_dir, args,", "'node', 'start', ] print(f\"Starting node with:\\n\\tdata_dir: {data_dir}\\n\\tkmd_dir: {kmd_dir}\") goal(data_dir, kmd_dir," ]
[ "absolute or relative size and the superscrollpanel will fill it", "if needed. \"\"\" from pyjamas.ui.SimplePanel import SimplePanel from pyjamas.ui.ScrollPanel import", "vertical scrollbars if needed. \"\"\" from pyjamas.ui.SimplePanel import SimplePanel from", "a parent widget of unknown size. To avoid this problem", "and vertical scrolling position, respectively. It is in the nature", "import ScrollPanel from pyjamas.ui.HTML import HTML from pyjamas.ui.VerticalPanel import VerticalPanel", "HTML from pyjamas.ui.VerticalPanel import VerticalPanel class ScrollPanelDemo(SimplePanel): def __init__(self): SimplePanel.__init__(self)", "out and apply vertical scrollbars if needed. \"\"\" from pyjamas.ui.SimplePanel", "be told of is not an unvarying \" + \"way;<p>The", "\"Heaven and Earth sprang;<p>The named is but the \" +", "Way that can be told of is not an unvarying", "ScrollPanel from pyjamas.ui.HTML import HTML from pyjamas.ui.VerticalPanel import VerticalPanel class", "pyjamas.ui.SimplePanel import SimplePanel from pyjamas.ui.ScrollPanel import ScrollPanel from pyjamas.ui.HTML import", "to change the horizontal and vertical scrolling position, respectively. It", "relative size and the superscrollpanel will fill it out and", "SuperScrollPanel in Showcase.css! \") panel2 = SuperScrollPanel(contents2) container.add(panel2) vert.add(container) class", "fill out a parent widget of unknown size. To avoid", "it a relative size, it will not work. This makes", "will fill it out and apply vertical scrollbars if needed.", "a scrollpanel that if you give it a relative size,", "can be told of is not an unvarying \" +", "its behaviour as shown in the second example: \"container\" represents", "fill it out and apply vertical scrollbars if needed. \"\"\"", "``setScrollHorizontalPosition(hPos)`` to change the horizontal and vertical scrolling position, respectively.", "parent widget of unknown size. 
To avoid this problem you", "\"\"\" from pyjamas.ui.SimplePanel import SimplePanel from pyjamas.ui.ScrollPanel import ScrollPanel from", "HTML(\"<b>Tao Te Ching, Chapter One</b><p>\" + \"The Way that can", "scroll bars to be always visible, call ``setAlwaysShowScrollBars(True)``. You can", "told of is not an unvarying \" + \"way;<p>The names", "needed. \"\"\" from pyjamas.ui.SimplePanel import SimplePanel from pyjamas.ui.ScrollPanel import ScrollPanel", "be named are not \" + \"unvarying names.<p>It was from", "= SimplePanel(Width=\"400px\", Height=\"200px\") contents2 = HTML(50*\"Dont forget to grab the", "it where you want it to fill out a parent", "respectively. It is in the nature of a scrollpanel that", "vert.add(panel) container = SimplePanel(Width=\"400px\", Height=\"200px\") contents2 = HTML(50*\"Dont forget to", "container = SimplePanel(Width=\"400px\", Height=\"200px\") contents2 = HTML(50*\"Dont forget to grab", "its contents. If you want the scroll bars to be", "HTML(50*\"Dont forget to grab the css for SuperScrollPanel in Showcase.css!", "in the nature of a scrollpanel that if you give", "are not \" + \"unvarying names.<p>It was from the Nameless", "an unvarying \" + \"way;<p>The names that can be named", "SimplePanel(Width=\"400px\", Height=\"200px\") contents2 = HTML(50*\"Dont forget to grab the css", "panel): ScrollPanel.__init__(self) self.setHeight(\"100%\") self.setStyleName(\"SuperScrollPanelOuter\") self.inner = SimplePanel(Height=\"100%\") self.add(self.inner) self.inner.setStyleName(\"SuperScrollPanelInner\") self.inner.add(panel)", "contents. 
If you want the scroll bars to be always", "\" + \"way;<p>The names that can be named are not", "panel.add(contents) vert.add(panel) container = SimplePanel(Width=\"400px\", Height=\"200px\") contents2 = HTML(50*\"Dont forget", "= SuperScrollPanel(contents2) container.add(panel2) vert.add(container) class SuperScrollPanel(ScrollPanel): def __init__(self, panel): ScrollPanel.__init__(self)", "be always visible, call ``setAlwaysShowScrollBars(True)``. You can also change the", "apply vertical scrollbars if needed. \"\"\" from pyjamas.ui.SimplePanel import SimplePanel", "a relative size, it will not work. This makes it", "rears the ten thousand creatures, \" + \"each after its", "superscrollpanel will fill it out and apply vertical scrollbars if", "want it to fill out a parent widget of unknown", "unknown size. To avoid this problem you will have to", "could be any absolute or relative size and the superscrollpanel", "+ \"each after its kind.\") panel.add(contents) vert.add(panel) container = SimplePanel(Width=\"400px\",", "\"100px\")) contents = HTML(\"<b>Tao Te Ching, Chapter One</b><p>\" + \"The", "\"mother that rears the ten thousand creatures, \" + \"each", "\"each after its kind.\") panel.add(contents) vert.add(panel) container = SimplePanel(Width=\"400px\", Height=\"200px\")", "SuperScrollPanel(contents2) container.add(panel2) vert.add(container) class SuperScrollPanel(ScrollPanel): def __init__(self, panel): ScrollPanel.__init__(self) self.setHeight(\"100%\")", "use it where you want it to fill out a", "it to fill out a parent widget of unknown size.", "named is but the \" + \"mother that rears the", "+ \"mother that rears the ten thousand creatures, \" +", "panel2 = SuperScrollPanel(contents2) container.add(panel2) vert.add(container) class SuperScrollPanel(ScrollPanel): def __init__(self, panel):", "represents the parent widget that could be any absolute or", "the css for SuperScrollPanel in Showcase.css! 
\") panel2 = SuperScrollPanel(contents2)", "import HTML from pyjamas.ui.VerticalPanel import VerticalPanel class ScrollPanelDemo(SimplePanel): def __init__(self):", "scrolling position programmatically by calling ``setScrollPosition(vPos)`` and ``setScrollHorizontalPosition(hPos)`` to change", "= HTML(\"<b>Tao Te Ching, Chapter One</b><p>\" + \"The Way that", "is not an unvarying \" + \"way;<p>The names that can", "``ui.ScrollPanel`` class implements a panel that scrolls its contents. If", "change the current scrolling position programmatically by calling ``setScrollPosition(vPos)`` and", "of unknown size. To avoid this problem you will have", "from the Nameless that \" + \"Heaven and Earth sprang;<p>The", "\"unvarying names.<p>It was from the Nameless that \" + \"Heaven", "kind.\") panel.add(contents) vert.add(panel) container = SimplePanel(Width=\"400px\", Height=\"200px\") contents2 = HTML(50*\"Dont", "calling ``setScrollPosition(vPos)`` and ``setScrollHorizontalPosition(hPos)`` to change the horizontal and vertical", "to control its behaviour as shown in the second example:", "from pyjamas.ui.ScrollPanel import ScrollPanel from pyjamas.ui.HTML import HTML from pyjamas.ui.VerticalPanel", "\"The Way that can be told of is not an", "content in a SimplePanel and then use css/oveflow to control", "second example: \"container\" represents the parent widget that could be", "You can also change the current scrolling position programmatically by", "pyjamas.ui.ScrollPanel import ScrollPanel from pyjamas.ui.HTML import HTML from pyjamas.ui.VerticalPanel import", "wrap its content in a SimplePanel and then use css/oveflow", "vert = VerticalPanel() vert.setSpacing(\"10px\") self.add(vert) panel = ScrollPanel(Size=(\"300px\", \"100px\")) contents", "class ScrollPanelDemo(SimplePanel): def __init__(self): SimplePanel.__init__(self) vert = VerticalPanel() vert.setSpacing(\"10px\") self.add(vert)", "is but the \" + \"mother that rears the ten", "SimplePanel from 
pyjamas.ui.ScrollPanel import ScrollPanel from pyjamas.ui.HTML import HTML from", "widget of unknown size. To avoid this problem you will", "and then use css/oveflow to control its behaviour as shown", "unvarying \" + \"way;<p>The names that can be named are", "It is in the nature of a scrollpanel that if", "container.add(panel2) vert.add(container) class SuperScrollPanel(ScrollPanel): def __init__(self, panel): ScrollPanel.__init__(self) self.setHeight(\"100%\") self.setStyleName(\"SuperScrollPanelOuter\")", "problem you will have to wrap its content in a", "def __init__(self): SimplePanel.__init__(self) vert = VerticalPanel() vert.setSpacing(\"10px\") self.add(vert) panel =", "nature of a scrollpanel that if you give it a", "the current scrolling position programmatically by calling ``setScrollPosition(vPos)`` and ``setScrollHorizontalPosition(hPos)``", "in Showcase.css! \") panel2 = SuperScrollPanel(contents2) container.add(panel2) vert.add(container) class SuperScrollPanel(ScrollPanel):", "+ \"Heaven and Earth sprang;<p>The named is but the \"", "scrollbars if needed. \"\"\" from pyjamas.ui.SimplePanel import SimplePanel from pyjamas.ui.ScrollPanel", "change the horizontal and vertical scrolling position, respectively. It is", "bars to be always visible, call ``setAlwaysShowScrollBars(True)``. 
You can also", "the superscrollpanel will fill it out and apply vertical scrollbars", "and Earth sprang;<p>The named is but the \" + \"mother", "have to wrap its content in a SimplePanel and then", "the ten thousand creatures, \" + \"each after its kind.\")", "but the \" + \"mother that rears the ten thousand", "from pyjamas.ui.SimplePanel import SimplePanel from pyjamas.ui.ScrollPanel import ScrollPanel from pyjamas.ui.HTML", "Chapter One</b><p>\" + \"The Way that can be told of", "of is not an unvarying \" + \"way;<p>The names that", "this problem you will have to wrap its content in", "after its kind.\") panel.add(contents) vert.add(panel) container = SimplePanel(Width=\"400px\", Height=\"200px\") contents2", "vertical scrolling position, respectively. It is in the nature of", "__init__(self, panel): ScrollPanel.__init__(self) self.setHeight(\"100%\") self.setStyleName(\"SuperScrollPanelOuter\") self.inner = SimplePanel(Height=\"100%\") self.add(self.inner) self.inner.setStyleName(\"SuperScrollPanelInner\")", "work. This makes it tricky to use it where you", "also change the current scrolling position programmatically by calling ``setScrollPosition(vPos)``", "can be named are not \" + \"unvarying names.<p>It was", "you will have to wrap its content in a SimplePanel", "To avoid this problem you will have to wrap its", "``setAlwaysShowScrollBars(True)``. You can also change the current scrolling position programmatically", "position programmatically by calling ``setScrollPosition(vPos)`` and ``setScrollHorizontalPosition(hPos)`` to change the", "by calling ``setScrollPosition(vPos)`` and ``setScrollHorizontalPosition(hPos)`` to change the horizontal and", "the parent widget that could be any absolute or relative", "not \" + \"unvarying names.<p>It was from the Nameless that", "names.<p>It was from the Nameless that \" + \"Heaven and", "forget to grab the css for SuperScrollPanel in Showcase.css! 
\")", "\" + \"unvarying names.<p>It was from the Nameless that \"", "that could be any absolute or relative size and the", "control its behaviour as shown in the second example: \"container\"", "tricky to use it where you want it to fill", "+ \"way;<p>The names that can be named are not \"", "from pyjamas.ui.HTML import HTML from pyjamas.ui.VerticalPanel import VerticalPanel class ScrollPanelDemo(SimplePanel):", "\" + \"each after its kind.\") panel.add(contents) vert.add(panel) container =", "\"container\" represents the parent widget that could be any absolute", "\"way;<p>The names that can be named are not \" +", "example: \"container\" represents the parent widget that could be any", "Height=\"200px\") contents2 = HTML(50*\"Dont forget to grab the css for", "out a parent widget of unknown size. To avoid this", "if you give it a relative size, it will not", "parent widget that could be any absolute or relative size", "import VerticalPanel class ScrollPanelDemo(SimplePanel): def __init__(self): SimplePanel.__init__(self) vert = VerticalPanel()", "you want the scroll bars to be always visible, call", "you want it to fill out a parent widget of", "\") panel2 = SuperScrollPanel(contents2) container.add(panel2) vert.add(container) class SuperScrollPanel(ScrollPanel): def __init__(self,", "and the superscrollpanel will fill it out and apply vertical", "not work. 
This makes it tricky to use it where", "pyjamas.ui.HTML import HTML from pyjamas.ui.VerticalPanel import VerticalPanel class ScrollPanelDemo(SimplePanel): def", "\" + \"Heaven and Earth sprang;<p>The named is but the", "If you want the scroll bars to be always visible,", "This makes it tricky to use it where you want", "SimplePanel.__init__(self) vert = VerticalPanel() vert.setSpacing(\"10px\") self.add(vert) panel = ScrollPanel(Size=(\"300px\", \"100px\"))", "and ``setScrollHorizontalPosition(hPos)`` to change the horizontal and vertical scrolling position,", "where you want it to fill out a parent widget", "be any absolute or relative size and the superscrollpanel will", "__init__(self): SimplePanel.__init__(self) vert = VerticalPanel() vert.setSpacing(\"10px\") self.add(vert) panel = ScrollPanel(Size=(\"300px\",", "not an unvarying \" + \"way;<p>The names that can be", "programmatically by calling ``setScrollPosition(vPos)`` and ``setScrollHorizontalPosition(hPos)`` to change the horizontal", "+ \"unvarying names.<p>It was from the Nameless that \" +", "that can be named are not \" + \"unvarying names.<p>It", "scrolls its contents. If you want the scroll bars to", "css/oveflow to control its behaviour as shown in the second", "it will not work. This makes it tricky to use", "from pyjamas.ui.VerticalPanel import VerticalPanel class ScrollPanelDemo(SimplePanel): def __init__(self): SimplePanel.__init__(self) vert", "behaviour as shown in the second example: \"container\" represents the", "shown in the second example: \"container\" represents the parent widget", "that can be told of is not an unvarying \"", "always visible, call ``setAlwaysShowScrollBars(True)``. You can also change the current", "for SuperScrollPanel in Showcase.css! 
\") panel2 = SuperScrollPanel(contents2) container.add(panel2) vert.add(container)", "SuperScrollPanel(ScrollPanel): def __init__(self, panel): ScrollPanel.__init__(self) self.setHeight(\"100%\") self.setStyleName(\"SuperScrollPanelOuter\") self.inner = SimplePanel(Height=\"100%\")", "css for SuperScrollPanel in Showcase.css! \") panel2 = SuperScrollPanel(contents2) container.add(panel2)", "Ching, Chapter One</b><p>\" + \"The Way that can be told", "def __init__(self, panel): ScrollPanel.__init__(self) self.setHeight(\"100%\") self.setStyleName(\"SuperScrollPanelOuter\") self.inner = SimplePanel(Height=\"100%\") self.add(self.inner)", "it tricky to use it where you want it to", "``setScrollPosition(vPos)`` and ``setScrollHorizontalPosition(hPos)`` to change the horizontal and vertical scrolling", "scrollpanel that if you give it a relative size, it", "call ``setAlwaysShowScrollBars(True)``. You can also change the current scrolling position", "contents = HTML(\"<b>Tao Te Ching, Chapter One</b><p>\" + \"The Way", "a panel that scrolls its contents. If you want the", "the Nameless that \" + \"Heaven and Earth sprang;<p>The named", "visible, call ``setAlwaysShowScrollBars(True)``. You can also change the current scrolling", "the scroll bars to be always visible, call ``setAlwaysShowScrollBars(True)``. You", "in the second example: \"container\" represents the parent widget that", "size. To avoid this problem you will have to wrap", "thousand creatures, \" + \"each after its kind.\") panel.add(contents) vert.add(panel)", "makes it tricky to use it where you want it", "the nature of a scrollpanel that if you give it", "implements a panel that scrolls its contents. If you want", "to wrap its content in a SimplePanel and then use", "\"\"\" The ``ui.ScrollPanel`` class implements a panel that scrolls its", "grab the css for SuperScrollPanel in Showcase.css! 
\") panel2 =", "its kind.\") panel.add(contents) vert.add(panel) container = SimplePanel(Width=\"400px\", Height=\"200px\") contents2 =", "panel that scrolls its contents. If you want the scroll", "ScrollPanelDemo(SimplePanel): def __init__(self): SimplePanel.__init__(self) vert = VerticalPanel() vert.setSpacing(\"10px\") self.add(vert) panel", "horizontal and vertical scrolling position, respectively. It is in the", "will not work. This makes it tricky to use it", "ten thousand creatures, \" + \"each after its kind.\") panel.add(contents)", "relative size, it will not work. This makes it tricky", "avoid this problem you will have to wrap its content", "that if you give it a relative size, it will", "= VerticalPanel() vert.setSpacing(\"10px\") self.add(vert) panel = ScrollPanel(Size=(\"300px\", \"100px\")) contents =", "or relative size and the superscrollpanel will fill it out", "VerticalPanel class ScrollPanelDemo(SimplePanel): def __init__(self): SimplePanel.__init__(self) vert = VerticalPanel() vert.setSpacing(\"10px\")", "to grab the css for SuperScrollPanel in Showcase.css! \") panel2", "size, it will not work. This makes it tricky to", "scrolling position, respectively. It is in the nature of a", "creatures, \" + \"each after its kind.\") panel.add(contents) vert.add(panel) container", "to use it where you want it to fill out", "its content in a SimplePanel and then use css/oveflow to", "can also change the current scrolling position programmatically by calling", "current scrolling position programmatically by calling ``setScrollPosition(vPos)`` and ``setScrollHorizontalPosition(hPos)`` to", "class implements a panel that scrolls its contents. 
If you", "= ScrollPanel(Size=(\"300px\", \"100px\")) contents = HTML(\"<b>Tao Te Ching, Chapter One</b><p>\"", "Earth sprang;<p>The named is but the \" + \"mother that", "the \" + \"mother that rears the ten thousand creatures,", "size and the superscrollpanel will fill it out and apply", "any absolute or relative size and the superscrollpanel will fill", "sprang;<p>The named is but the \" + \"mother that rears", "Nameless that \" + \"Heaven and Earth sprang;<p>The named is", "and apply vertical scrollbars if needed. \"\"\" from pyjamas.ui.SimplePanel import", "position, respectively. It is in the nature of a scrollpanel", "it out and apply vertical scrollbars if needed. \"\"\" from", "Te Ching, Chapter One</b><p>\" + \"The Way that can be", "One</b><p>\" + \"The Way that can be told of is", "self.add(vert) panel = ScrollPanel(Size=(\"300px\", \"100px\")) contents = HTML(\"<b>Tao Te Ching,", "+ \"The Way that can be told of is not", "vert.add(container) class SuperScrollPanel(ScrollPanel): def __init__(self, panel): ScrollPanel.__init__(self) self.setHeight(\"100%\") self.setStyleName(\"SuperScrollPanelOuter\") self.inner", "will have to wrap its content in a SimplePanel and", "vert.setSpacing(\"10px\") self.add(vert) panel = ScrollPanel(Size=(\"300px\", \"100px\")) contents = HTML(\"<b>Tao Te", "that \" + \"Heaven and Earth sprang;<p>The named is but", "named are not \" + \"unvarying names.<p>It was from the", "contents2 = HTML(50*\"Dont forget to grab the css for SuperScrollPanel", "widget that could be any absolute or relative size and", "want the scroll bars to be always visible, call ``setAlwaysShowScrollBars(True)``.", "as shown in the second example: \"container\" represents the parent", "give it a relative size, it will not work. 
This", "you give it a relative size, it will not work.", "use css/oveflow to control its behaviour as shown in the", "panel = ScrollPanel(Size=(\"300px\", \"100px\")) contents = HTML(\"<b>Tao Te Ching, Chapter", "ScrollPanel(Size=(\"300px\", \"100px\")) contents = HTML(\"<b>Tao Te Ching, Chapter One</b><p>\" +", "the second example: \"container\" represents the parent widget that could", "to fill out a parent widget of unknown size. To", "class SuperScrollPanel(ScrollPanel): def __init__(self, panel): ScrollPanel.__init__(self) self.setHeight(\"100%\") self.setStyleName(\"SuperScrollPanelOuter\") self.inner =", "to be always visible, call ``setAlwaysShowScrollBars(True)``. You can also change", "is in the nature of a scrollpanel that if you", "\" + \"mother that rears the ten thousand creatures, \"", "= HTML(50*\"Dont forget to grab the css for SuperScrollPanel in", "The ``ui.ScrollPanel`` class implements a panel that scrolls its contents.", "names that can be named are not \" + \"unvarying", "that rears the ten thousand creatures, \" + \"each after", "import SimplePanel from pyjamas.ui.ScrollPanel import ScrollPanel from pyjamas.ui.HTML import HTML", "that scrolls its contents. If you want the scroll bars", "of a scrollpanel that if you give it a relative", "a SimplePanel and then use css/oveflow to control its behaviour", "SimplePanel and then use css/oveflow to control its behaviour as", "Showcase.css! \") panel2 = SuperScrollPanel(contents2) container.add(panel2) vert.add(container) class SuperScrollPanel(ScrollPanel): def", "VerticalPanel() vert.setSpacing(\"10px\") self.add(vert) panel = ScrollPanel(Size=(\"300px\", \"100px\")) contents = HTML(\"<b>Tao", "pyjamas.ui.VerticalPanel import VerticalPanel class ScrollPanelDemo(SimplePanel): def __init__(self): SimplePanel.__init__(self) vert =", "the horizontal and vertical scrolling position, respectively. 
It is in", "in a SimplePanel and then use css/oveflow to control its", "then use css/oveflow to control its behaviour as shown in", "was from the Nameless that \" + \"Heaven and Earth" ]
[ "word[1]) except: print(\"Sleeping five seconds...\") time.sleep(5) if not query: print(word)", "for word in f: word = word.strip() if phone_spellable.match(word): candidate_words.append((len(word),", "<filename>psdn.py #!/usr/bin/env python3 # May you recognize your weaknesses and", "False while query is False: try: query = whois.query('%s.com' %", "share your strengths. # May you share freely, never taking", "import time import whois phone_spellable = re.compile(r'^[filoqrsuwxy]+$') candidate_words = []", "everyone you find. import re import time import whois phone_spellable", "% word[1]) except: print(\"Sleeping five seconds...\") time.sleep(5) if not query:", "import re import time import whois phone_spellable = re.compile(r'^[filoqrsuwxy]+$') candidate_words", "phone_spellable.match(word): candidate_words.append((len(word), word)) candidate_words.sort() for word in candidate_words: query =", "# May you find love and love everyone you find.", "try: query = whois.query('%s.com' % word[1]) except: print(\"Sleeping five seconds...\")", "your strengths. # May you share freely, never taking more", "more than you give. # May you find love and", "and love everyone you find. import re import time import", "weaknesses and share your strengths. # May you share freely,", "= [] with open('/usr/share/dict/words') as f: for word in f:", "you find. import re import time import whois phone_spellable =", "False: try: query = whois.query('%s.com' % word[1]) except: print(\"Sleeping five", "word)) candidate_words.sort() for word in candidate_words: query = False while", "word = word.strip() if phone_spellable.match(word): candidate_words.append((len(word), word)) candidate_words.sort() for word", "phone_spellable = re.compile(r'^[filoqrsuwxy]+$') candidate_words = [] with open('/usr/share/dict/words') as f:", "never taking more than you give. 
# May you find", "while query is False: try: query = whois.query('%s.com' % word[1])", "time import whois phone_spellable = re.compile(r'^[filoqrsuwxy]+$') candidate_words = [] with", "if phone_spellable.match(word): candidate_words.append((len(word), word)) candidate_words.sort() for word in candidate_words: query", "share freely, never taking more than you give. # May", "query is False: try: query = whois.query('%s.com' % word[1]) except:", "you share freely, never taking more than you give. #", "May you find love and love everyone you find. import", "and share your strengths. # May you share freely, never", "your weaknesses and share your strengths. # May you share", "you find love and love everyone you find. import re", "whois.query('%s.com' % word[1]) except: print(\"Sleeping five seconds...\") time.sleep(5) if not", "love everyone you find. import re import time import whois", "in f: word = word.strip() if phone_spellable.match(word): candidate_words.append((len(word), word)) candidate_words.sort()", "f: word = word.strip() if phone_spellable.match(word): candidate_words.append((len(word), word)) candidate_words.sort() for", "than you give. # May you find love and love", "candidate_words.append((len(word), word)) candidate_words.sort() for word in candidate_words: query = False", "as f: for word in f: word = word.strip() if", "#!/usr/bin/env python3 # May you recognize your weaknesses and share", "# May you recognize your weaknesses and share your strengths.", "candidate_words.sort() for word in candidate_words: query = False while query", "query = False while query is False: try: query =", "recognize your weaknesses and share your strengths. # May you", "you give. 
# May you find love and love everyone", "word.strip() if phone_spellable.match(word): candidate_words.append((len(word), word)) candidate_words.sort() for word in candidate_words:", "re.compile(r'^[filoqrsuwxy]+$') candidate_words = [] with open('/usr/share/dict/words') as f: for word", "= word.strip() if phone_spellable.match(word): candidate_words.append((len(word), word)) candidate_words.sort() for word in", "find love and love everyone you find. import re import", "for word in candidate_words: query = False while query is", "candidate_words = [] with open('/usr/share/dict/words') as f: for word in", "[] with open('/usr/share/dict/words') as f: for word in f: word", "word in candidate_words: query = False while query is False:", "import whois phone_spellable = re.compile(r'^[filoqrsuwxy]+$') candidate_words = [] with open('/usr/share/dict/words')", "re import time import whois phone_spellable = re.compile(r'^[filoqrsuwxy]+$') candidate_words =", "word in f: word = word.strip() if phone_spellable.match(word): candidate_words.append((len(word), word))", "candidate_words: query = False while query is False: try: query", "whois phone_spellable = re.compile(r'^[filoqrsuwxy]+$') candidate_words = [] with open('/usr/share/dict/words') as", "# May you share freely, never taking more than you", "is False: try: query = whois.query('%s.com' % word[1]) except: print(\"Sleeping", "taking more than you give. # May you find love", "python3 # May you recognize your weaknesses and share your", "= whois.query('%s.com' % word[1]) except: print(\"Sleeping five seconds...\") time.sleep(5) if", "query = whois.query('%s.com' % word[1]) except: print(\"Sleeping five seconds...\") time.sleep(5)", "= re.compile(r'^[filoqrsuwxy]+$') candidate_words = [] with open('/usr/share/dict/words') as f: for", "with open('/usr/share/dict/words') as f: for word in f: word =", "you recognize your weaknesses and share your strengths. 
# May", "open('/usr/share/dict/words') as f: for word in f: word = word.strip()", "May you recognize your weaknesses and share your strengths. #", "freely, never taking more than you give. # May you", "find. import re import time import whois phone_spellable = re.compile(r'^[filoqrsuwxy]+$')", "in candidate_words: query = False while query is False: try:", "= False while query is False: try: query = whois.query('%s.com'", "May you share freely, never taking more than you give.", "f: for word in f: word = word.strip() if phone_spellable.match(word):", "love and love everyone you find. import re import time", "strengths. # May you share freely, never taking more than", "give. # May you find love and love everyone you" ]
[ "self.config = config self.CallGoTCList = [] def create_testcase_list(self): ''' Creates", "config self.CallGoTCList = [] def create_testcase_list(self): ''' Creates a test", "############################################################ # Construct 'go' test cases for k in self.config.test_case['go'].keys():", "self.config.test_case['go'][k][4], req_typ = self.config.usr_request['go'], floor_num = self.config.test_case['go'][k][5], direction = 0,", "test case list out of the configuration ''' # ############################################################", "July 2020 * @version 0.1 * @brief Implements a class", "= self.config.test_case['call'][k][3], hdr_len = self.config.network['packet_header_len'], payload_len = self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header =", "rx_node_addr = self.config.test_case['go'][k][1], msg_id = self.config.test_case['go'][k][2], msg_class = self.config.test_case['go'][k][3], hdr_len", "* @file ElevatorTestCaseList.py * @author <NAME> * @date 30 July", "= [] def create_testcase_list(self): ''' Creates a test case list", "= msgHdr, time_tag = self.config.test_case['go'][k][4], req_typ = self.config.usr_request['go'], floor_num =", "self.config.network['packet_header_len'], payload_len = self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr, time_tag = self.config.test_case['go'][k][4],", "msgProto.MsgHeader(tx_node_addr = self.config.test_case['go'][k][0], rx_node_addr = self.config.test_case['go'][k][1], msg_id = self.config.test_case['go'][k][2], msg_class", "program life cycle. 
''' #!/usr/bin/env python3 import sys import ctypes", "for k in self.config.test_case['go'].keys(): msgHdr = msgProto.MsgHeader(tx_node_addr = self.config.test_case['go'][k][0], rx_node_addr", "of the configuration and holds it during the runtime '''", "time_tag = self.config.test_case['go'][k][4], req_typ = self.config.usr_request['go'], floor_num = self.config.test_case['go'][k][5], direction", "as cfg import ElevatorMsgProtocol as msgProto class ElevatorTestCaseList: ''' This", "a test case list out of the configuration ''' #", "= self.config.test_case['call'][k][0], rx_node_addr = self.config.test_case['call'][k][1], msg_id = self.config.test_case['call'][k][2], msg_class =", "floor_num = self.config.test_case['call'][k][5], direction = self.config.test_case['call'][k][6], go_msg_id = self.config.test_case['call'][k][7], state", "self.config.test_case['go'].keys(): msgHdr = msgProto.MsgHeader(tx_node_addr = self.config.test_case['go'][k][0], rx_node_addr = self.config.test_case['go'][k][1], msg_id", "cycle. 
''' #!/usr/bin/env python3 import sys import ctypes import ElevatorConfig", "<NAME> * @date 30 July 2020 * @version 0.1 *", "self.config.test_case['call'].keys(): msgHdr = msgProto.MsgHeader(tx_node_addr = self.config.test_case['call'][k][0], rx_node_addr = self.config.test_case['call'][k][1], msg_id", "go_msg_id = self.config.test_case['call'][k][7], state = msgProto.CallGoState.READY2GO)) # ############################################################ # Construct", "self.config.test_case['go'][k][0], rx_node_addr = self.config.test_case['go'][k][1], msg_id = self.config.test_case['go'][k][2], msg_class = self.config.test_case['go'][k][3],", "''' #!/usr/bin/env python3 import sys import ctypes import ElevatorConfig as", "msg_id = self.config.test_case['go'][k][2], msg_class = self.config.test_case['go'][k][3], hdr_len = self.config.network['packet_header_len'], payload_len", "msgHdr = msgProto.MsgHeader(tx_node_addr = self.config.test_case['call'][k][0], rx_node_addr = self.config.test_case['call'][k][1], msg_id =", "and holds it during the runtime ''' def __init__(self, config):", "self.config.test_case['call'][k][6], go_msg_id = self.config.test_case['call'][k][7], state = msgProto.CallGoState.READY2GO)) # ############################################################ #", "during the program life cycle. 
''' #!/usr/bin/env python3 import sys", "@file ElevatorTestCaseList.py * @author <NAME> * @date 30 July 2020", "configuration ''' # ############################################################ # Construct 'call' test cases for", "= self.config.network['packet_header_len'], payload_len = self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr, time_tag =", "payload_len = self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr, time_tag = self.config.test_case['call'][k][4], req_typ", "case list out of the configuration and holds it during", "def create_testcase_list(self): ''' Creates a test case list out of", "@brief Implements a class to hold all the test cases", "floor_num = self.config.test_case['go'][k][5], direction = 0, go_msg_id = 0, state", "the configuration and holds it during the runtime ''' def", "''' This class builds a test case list out of", "= self.config.test_case['go'][k][0], rx_node_addr = self.config.test_case['go'][k][1], msg_id = self.config.test_case['go'][k][2], msg_class =", "out of the configuration ''' # ############################################################ # Construct 'call'", "self.config.test_case['go'][k][1], msg_id = self.config.test_case['go'][k][2], msg_class = self.config.test_case['go'][k][3], hdr_len = self.config.network['packet_header_len'],", "of the configuration ''' # ############################################################ # Construct 'call' test", "@author <NAME> * @date 30 July 2020 * @version 0.1", "= msgHdr, time_tag = self.config.test_case['call'][k][4], req_typ = self.config.usr_request['call'], floor_num =", "test cases for k in self.config.test_case['call'].keys(): msgHdr = msgProto.MsgHeader(tx_node_addr =", "2020 * @version 0.1 * @brief Implements a class to", "self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr, time_tag = self.config.test_case['call'][k][4], 
req_typ = self.config.usr_request['call'], floor_num", "import ctypes import ElevatorConfig as cfg import ElevatorMsgProtocol as msgProto", "class builds a test case list out of the configuration", "__init__(self, config): self.config = config self.CallGoTCList = [] def create_testcase_list(self):", "cases for k in self.config.test_case['call'].keys(): msgHdr = msgProto.MsgHeader(tx_node_addr = self.config.test_case['call'][k][0],", "direction = self.config.test_case['call'][k][6], go_msg_id = self.config.test_case['call'][k][7], state = msgProto.CallGoState.READY2GO)) #", "= self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr, time_tag = self.config.test_case['call'][k][4], req_typ =", "Creates a test case list out of the configuration '''", "= self.config.test_case['call'][k][2], msg_class = self.config.test_case['call'][k][3], hdr_len = self.config.network['packet_header_len'], payload_len =", "self.config.test_case['call'][k][1], msg_id = self.config.test_case['call'][k][2], msg_class = self.config.test_case['call'][k][3], hdr_len = self.config.network['packet_header_len'],", "python3 import sys import ctypes import ElevatorConfig as cfg import", "= self.config.test_case['go'][k][3], hdr_len = self.config.network['packet_header_len'], payload_len = self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header =", "* @date 30 July 2020 * @version 0.1 * @brief", "life cycle. ''' #!/usr/bin/env python3 import sys import ctypes import", "case list out of the configuration ''' # ############################################################ #", "# ############################################################ # Construct 'call' test cases for k in", "= self.config.test_case['go'][k][2], msg_class = self.config.test_case['go'][k][3], hdr_len = self.config.network['packet_header_len'], payload_len =", "cases during the program life cycle. 
''' #!/usr/bin/env python3 import", "it during the runtime ''' def __init__(self, config): self.config =", "configuration and holds it during the runtime ''' def __init__(self,", "cfg import ElevatorMsgProtocol as msgProto class ElevatorTestCaseList: ''' This class", "msgHdr, time_tag = self.config.test_case['call'][k][4], req_typ = self.config.usr_request['call'], floor_num = self.config.test_case['call'][k][5],", "msg_class = self.config.test_case['call'][k][3], hdr_len = self.config.network['packet_header_len'], payload_len = self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header", "= self.config.test_case['call'][k][7], state = msgProto.CallGoState.READY2GO)) # ############################################################ # Construct 'go'", "= self.config.test_case['go'][k][5], direction = 0, go_msg_id = 0, state =", "runtime ''' def __init__(self, config): self.config = config self.CallGoTCList =", "msgProto class ElevatorTestCaseList: ''' This class builds a test case", "test cases during the program life cycle. 
''' #!/usr/bin/env python3", "self.config.test_case['call'][k][5], direction = self.config.test_case['call'][k][6], go_msg_id = self.config.test_case['call'][k][7], state = msgProto.CallGoState.READY2GO))", "a test case list out of the configuration and holds", "= msgProto.MsgHeader(tx_node_addr = self.config.test_case['go'][k][0], rx_node_addr = self.config.test_case['go'][k][1], msg_id = self.config.test_case['go'][k][2],", "msgHdr, time_tag = self.config.test_case['go'][k][4], req_typ = self.config.usr_request['go'], floor_num = self.config.test_case['go'][k][5],", "time_tag = self.config.test_case['call'][k][4], req_typ = self.config.usr_request['call'], floor_num = self.config.test_case['call'][k][5], direction", "self.CallGoTCList = [] def create_testcase_list(self): ''' Creates a test case", "30 July 2020 * @version 0.1 * @brief Implements a", "state = msgProto.CallGoState.READY2GO)) # ############################################################ # Construct 'go' test cases", "''' Creates a test case list out of the configuration", "self.config.test_case['go'][k][2], msg_class = self.config.test_case['go'][k][3], hdr_len = self.config.network['packet_header_len'], payload_len = self.config.network['packet_payload_req_len'])", "msgProto.CallGoState.READY2GO)) # ############################################################ # Construct 'go' test cases for k", "''' # ############################################################ # Construct 'call' test cases for k", "''' * @file ElevatorTestCaseList.py * @author <NAME> * @date 30", "0.1 * @brief Implements a class to hold all the", "import ElevatorConfig as cfg import ElevatorMsgProtocol as msgProto class ElevatorTestCaseList:", "This class builds a test case list out of the", "sys import ctypes import ElevatorConfig as cfg import ElevatorMsgProtocol as", "self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr, time_tag = 
self.config.test_case['go'][k][4], req_typ = self.config.usr_request['go'],", "ctypes import ElevatorConfig as cfg import ElevatorMsgProtocol as msgProto class", "import ElevatorMsgProtocol as msgProto class ElevatorTestCaseList: ''' This class builds", "list out of the configuration and holds it during the", "ElevatorTestCaseList.py * @author <NAME> * @date 30 July 2020 *", "= msgProto.CallGoState.READY2GO)) # ############################################################ # Construct 'go' test cases for", "k in self.config.test_case['call'].keys(): msgHdr = msgProto.MsgHeader(tx_node_addr = self.config.test_case['call'][k][0], rx_node_addr =", "self.config.test_case['go'][k][3], hdr_len = self.config.network['packet_header_len'], payload_len = self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr,", "req_typ = self.config.usr_request['go'], floor_num = self.config.test_case['go'][k][5], direction = 0, go_msg_id", "as msgProto class ElevatorTestCaseList: ''' This class builds a test", "the runtime ''' def __init__(self, config): self.config = config self.CallGoTCList", "[] def create_testcase_list(self): ''' Creates a test case list out", "out of the configuration and holds it during the runtime", "msg_class = self.config.test_case['go'][k][3], hdr_len = self.config.network['packet_header_len'], payload_len = self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header", "ElevatorTestCaseList: ''' This class builds a test case list out", "= self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr, time_tag = self.config.test_case['go'][k][4], req_typ =", "payload_len = self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr, time_tag = self.config.test_case['go'][k][4], req_typ", "during the runtime ''' def __init__(self, config): 
self.config = config", "hdr_len = self.config.network['packet_header_len'], payload_len = self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr, time_tag", "class to hold all the test cases during the program", "Construct 'call' test cases for k in self.config.test_case['call'].keys(): msgHdr =", "rx_node_addr = self.config.test_case['call'][k][1], msg_id = self.config.test_case['call'][k][2], msg_class = self.config.test_case['call'][k][3], hdr_len", "list out of the configuration ''' # ############################################################ # Construct", "to hold all the test cases during the program life", "test case list out of the configuration and holds it", "cases for k in self.config.test_case['go'].keys(): msgHdr = msgProto.MsgHeader(tx_node_addr = self.config.test_case['go'][k][0],", "def __init__(self, config): self.config = config self.CallGoTCList = [] def", "self.config.usr_request['call'], floor_num = self.config.test_case['call'][k][5], direction = self.config.test_case['call'][k][6], go_msg_id = self.config.test_case['call'][k][7],", "the test cases during the program life cycle. 
''' #!/usr/bin/env", "* @author <NAME> * @date 30 July 2020 * @version", "#!/usr/bin/env python3 import sys import ctypes import ElevatorConfig as cfg", "= msgProto.MsgHeader(tx_node_addr = self.config.test_case['call'][k][0], rx_node_addr = self.config.test_case['call'][k][1], msg_id = self.config.test_case['call'][k][2],", "= self.config.usr_request['call'], floor_num = self.config.test_case['call'][k][5], direction = self.config.test_case['call'][k][6], go_msg_id =", "self.config.usr_request['go'], floor_num = self.config.test_case['go'][k][5], direction = 0, go_msg_id = 0,", "ElevatorConfig as cfg import ElevatorMsgProtocol as msgProto class ElevatorTestCaseList: '''", "self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr, time_tag = self.config.test_case['call'][k][4], req_typ = self.config.usr_request['call'],", "a class to hold all the test cases during the", "= config self.CallGoTCList = [] def create_testcase_list(self): ''' Creates a", "self.config.test_case['call'][k][0], rx_node_addr = self.config.test_case['call'][k][1], msg_id = self.config.test_case['call'][k][2], msg_class = self.config.test_case['call'][k][3],", "= self.config.usr_request['go'], floor_num = self.config.test_case['go'][k][5], direction = 0, go_msg_id =", "= self.config.test_case['call'][k][6], go_msg_id = self.config.test_case['call'][k][7], state = msgProto.CallGoState.READY2GO)) # ############################################################", "import sys import ctypes import ElevatorConfig as cfg import ElevatorMsgProtocol", "msg_id = self.config.test_case['call'][k][2], msg_class = self.config.test_case['call'][k][3], hdr_len = self.config.network['packet_header_len'], payload_len", "# Construct 'go' test cases for k in self.config.test_case['go'].keys(): msgHdr", "self.config.test_case['go'][k][5], direction = 0, go_msg_id = 0, state = msgProto.CallGoState.RESET))", "in self.config.test_case['go'].keys(): msgHdr = 
msgProto.MsgHeader(tx_node_addr = self.config.test_case['go'][k][0], rx_node_addr = self.config.test_case['go'][k][1],", "class ElevatorTestCaseList: ''' This class builds a test case list", "Construct 'go' test cases for k in self.config.test_case['go'].keys(): msgHdr =", "k in self.config.test_case['go'].keys(): msgHdr = msgProto.MsgHeader(tx_node_addr = self.config.test_case['go'][k][0], rx_node_addr =", "hold all the test cases during the program life cycle.", "Implements a class to hold all the test cases during", "holds it during the runtime ''' def __init__(self, config): self.config", "@version 0.1 * @brief Implements a class to hold all", "'call' test cases for k in self.config.test_case['call'].keys(): msgHdr = msgProto.MsgHeader(tx_node_addr", "msgHdr = msgProto.MsgHeader(tx_node_addr = self.config.test_case['go'][k][0], rx_node_addr = self.config.test_case['go'][k][1], msg_id =", "# Construct 'call' test cases for k in self.config.test_case['call'].keys(): msgHdr", "msgProto.MsgHeader(tx_node_addr = self.config.test_case['call'][k][0], rx_node_addr = self.config.test_case['call'][k][1], msg_id = self.config.test_case['call'][k][2], msg_class", "req_typ = self.config.usr_request['call'], floor_num = self.config.test_case['call'][k][5], direction = self.config.test_case['call'][k][6], go_msg_id", "builds a test case list out of the configuration and", "* @version 0.1 * @brief Implements a class to hold", "= self.config.test_case['call'][k][4], req_typ = self.config.usr_request['call'], floor_num = self.config.test_case['call'][k][5], direction =", "# ############################################################ # Construct 'go' test cases for k in", "test cases for k in self.config.test_case['go'].keys(): msgHdr = msgProto.MsgHeader(tx_node_addr =", "self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr, time_tag = self.config.test_case['go'][k][4], req_typ = self.config.usr_request['go'], floor_num", "'go' test cases for k in 
self.config.test_case['go'].keys(): msgHdr = msgProto.MsgHeader(tx_node_addr", "all the test cases during the program life cycle. '''", "config): self.config = config self.CallGoTCList = [] def create_testcase_list(self): '''", "############################################################ # Construct 'call' test cases for k in self.config.test_case['call'].keys():", "for k in self.config.test_case['call'].keys(): msgHdr = msgProto.MsgHeader(tx_node_addr = self.config.test_case['call'][k][0], rx_node_addr", "= self.config.test_case['go'][k][4], req_typ = self.config.usr_request['go'], floor_num = self.config.test_case['go'][k][5], direction =", "self.config.test_case['call'][k][7], state = msgProto.CallGoState.READY2GO)) # ############################################################ # Construct 'go' test", "''' def __init__(self, config): self.config = config self.CallGoTCList = []", "* @brief Implements a class to hold all the test", "self.config.test_case['call'][k][2], msg_class = self.config.test_case['call'][k][3], hdr_len = self.config.network['packet_header_len'], payload_len = self.config.network['packet_payload_req_len'])", "= self.config.test_case['call'][k][1], msg_id = self.config.test_case['call'][k][2], msg_class = self.config.test_case['call'][k][3], hdr_len =", "= self.config.test_case['go'][k][1], msg_id = self.config.test_case['go'][k][2], msg_class = self.config.test_case['go'][k][3], hdr_len =", "= self.config.test_case['call'][k][5], direction = self.config.test_case['call'][k][6], go_msg_id = self.config.test_case['call'][k][7], state =", "the program life cycle. 
''' #!/usr/bin/env python3 import sys import", "create_testcase_list(self): ''' Creates a test case list out of the", "ElevatorMsgProtocol as msgProto class ElevatorTestCaseList: ''' This class builds a", "in self.config.test_case['call'].keys(): msgHdr = msgProto.MsgHeader(tx_node_addr = self.config.test_case['call'][k][0], rx_node_addr = self.config.test_case['call'][k][1],", "self.config.test_case['call'][k][3], hdr_len = self.config.network['packet_header_len'], payload_len = self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr,", "@date 30 July 2020 * @version 0.1 * @brief Implements", "the configuration ''' # ############################################################ # Construct 'call' test cases", "self.config.network['packet_header_len'], payload_len = self.config.network['packet_payload_req_len']) self.CallGoTCList.append(msgProto.EncodeReqPacket(msg_header = msgHdr, time_tag = self.config.test_case['call'][k][4],", "self.config.test_case['call'][k][4], req_typ = self.config.usr_request['call'], floor_num = self.config.test_case['call'][k][5], direction = self.config.test_case['call'][k][6]," ]
[ "return JsonResponse({ 'message': message, 'result': result, 'total': cart.printable_total }) def", "def _cart_action_view(request, action_factory, form_class, message): form = form_class(data=request.POST) if not", "get_modal(request): cart = get_cart(request) return render(request, 'cart/modal.html', {'cart': cart}) @csrf_exempt", "'result': result, 'total': cart.printable_total }) def add(request): return _cart_action_view( request,", "form.is_valid(): return JsonResponse({'message': form.errors.as_json()}, status=403) cart = get_cart(request) try: result", "_cart_action_view( request, action_factory=lambda cart, data: cart.remove(**data), form_class=SelectProductForm, message=ugettext('Product removed from", "require_POST from django.http import JsonResponse from django.shortcuts import render from", "cart.lib import get_cart from cart.forms import SelectProductForm, SetQtyForm @require_POST def", "message=ugettext('Product removed from cart') ) def get_modal(request): cart = get_cart(request)", "not form.is_valid(): return JsonResponse({'message': form.errors.as_json()}, status=403) cart = get_cart(request) try:", "}) def add(request): return _cart_action_view( request, action_factory=lambda cart, data: cart.add(**data),", "import ugettext from django.views.decorators.http import require_POST from django.http import JsonResponse", "import render from django.core.exceptions import ValidationError from django.views.decorators.csrf import csrf_exempt", "data: cart.remove(**data), form_class=SelectProductForm, message=ugettext('Product removed from cart') ) def get_modal(request):", "result, 'total': cart.printable_total }) def add(request): return _cart_action_view( request, action_factory=lambda", "django.views.decorators.http import require_POST from django.http import JsonResponse from django.shortcuts import", "@require_POST def _cart_action_view(request, action_factory, form_class, message): form = form_class(data=request.POST) if", "if not 
form.is_valid(): return JsonResponse({'message': form.errors.as_json()}, status=403) cart = get_cart(request)", "= get_cart(request) try: result = action_factory(cart, form.cleaned_data) except ValidationError as", "from django.shortcuts import render from django.core.exceptions import ValidationError from django.views.decorators.csrf", "cart, data: cart.add(**data), form_class=SelectProductForm, message=ugettext('Product added to cart') ) def", "cart, data: cart.remove(**data), form_class=SelectProductForm, message=ugettext('Product removed from cart') ) def", "message): form = form_class(data=request.POST) if not form.is_valid(): return JsonResponse({'message': form.errors.as_json()},", "set_qty(request): return _cart_action_view( request, action_factory=lambda cart, data: cart.set_qty(**data), form_class=SetQtyForm, message=ugettext('Quantity", "result = action_factory(cart, form.cleaned_data) except ValidationError as e: return JsonResponse({'message':", "action_factory, form_class, message): form = form_class(data=request.POST) if not form.is_valid(): return", "ValidationError as e: return JsonResponse({'message': ', '.join(e.messages)}, status=403) return JsonResponse({", "try: result = action_factory(cart, form.cleaned_data) except ValidationError as e: return", "from django.utils.translation import ugettext from django.views.decorators.http import require_POST from django.http", "form.errors.as_json()}, status=403) cart = get_cart(request) try: result = action_factory(cart, form.cleaned_data)", "import ValidationError from django.views.decorators.csrf import csrf_exempt from cart.lib import get_cart", "cart}) @csrf_exempt def set_qty(request): return _cart_action_view( request, action_factory=lambda cart, data:", "from django.http import JsonResponse from django.shortcuts import render from django.core.exceptions", "def remove(request): return _cart_action_view( request, action_factory=lambda cart, data: cart.remove(**data), form_class=SelectProductForm,", 
"cart.forms import SelectProductForm, SetQtyForm @require_POST def _cart_action_view(request, action_factory, form_class, message):", "return JsonResponse({'message': form.errors.as_json()}, status=403) cart = get_cart(request) try: result =", "def add(request): return _cart_action_view( request, action_factory=lambda cart, data: cart.add(**data), form_class=SelectProductForm,", "action_factory=lambda cart, data: cart.add(**data), form_class=SelectProductForm, message=ugettext('Product added to cart') )", "cart = get_cart(request) return render(request, 'cart/modal.html', {'cart': cart}) @csrf_exempt def", "django.utils.translation import ugettext from django.views.decorators.http import require_POST from django.http import", "def set_qty(request): return _cart_action_view( request, action_factory=lambda cart, data: cart.set_qty(**data), form_class=SetQtyForm,", "import require_POST from django.http import JsonResponse from django.shortcuts import render", "as e: return JsonResponse({'message': ', '.join(e.messages)}, status=403) return JsonResponse({ 'message':", "e: return JsonResponse({'message': ', '.join(e.messages)}, status=403) return JsonResponse({ 'message': message,", "{'cart': cart}) @csrf_exempt def set_qty(request): return _cart_action_view( request, action_factory=lambda cart,", "csrf_exempt from cart.lib import get_cart from cart.forms import SelectProductForm, SetQtyForm", "data: cart.add(**data), form_class=SelectProductForm, message=ugettext('Product added to cart') ) def remove(request):", "add(request): return _cart_action_view( request, action_factory=lambda cart, data: cart.add(**data), form_class=SelectProductForm, message=ugettext('Product", "_cart_action_view( request, action_factory=lambda cart, data: cart.add(**data), form_class=SelectProductForm, message=ugettext('Product added to", "message=ugettext('Product added to cart') ) def remove(request): return _cart_action_view( request,", "cart') ) def get_modal(request): cart = 
get_cart(request) return render(request, 'cart/modal.html',", "cart') ) def remove(request): return _cart_action_view( request, action_factory=lambda cart, data:", "status=403) cart = get_cart(request) try: result = action_factory(cart, form.cleaned_data) except", "from django.core.exceptions import ValidationError from django.views.decorators.csrf import csrf_exempt from cart.lib", "return _cart_action_view( request, action_factory=lambda cart, data: cart.add(**data), form_class=SelectProductForm, message=ugettext('Product added", "return JsonResponse({'message': ', '.join(e.messages)}, status=403) return JsonResponse({ 'message': message, 'result':", "ugettext from django.views.decorators.http import require_POST from django.http import JsonResponse from", "request, action_factory=lambda cart, data: cart.add(**data), form_class=SelectProductForm, message=ugettext('Product added to cart')", "form.cleaned_data) except ValidationError as e: return JsonResponse({'message': ', '.join(e.messages)}, status=403)", "@csrf_exempt def set_qty(request): return _cart_action_view( request, action_factory=lambda cart, data: cart.set_qty(**data),", "remove(request): return _cart_action_view( request, action_factory=lambda cart, data: cart.remove(**data), form_class=SelectProductForm, message=ugettext('Product", "import csrf_exempt from cart.lib import get_cart from cart.forms import SelectProductForm,", "JsonResponse({'message': ', '.join(e.messages)}, status=403) return JsonResponse({ 'message': message, 'result': result,", "cart.add(**data), form_class=SelectProductForm, message=ugettext('Product added to cart') ) def remove(request): return", "return _cart_action_view( request, action_factory=lambda cart, data: cart.set_qty(**data), form_class=SetQtyForm, message=ugettext('Quantity updated')", "'cart/modal.html', {'cart': cart}) @csrf_exempt def set_qty(request): return _cart_action_view( request, action_factory=lambda", "cart.printable_total }) def add(request): return 
_cart_action_view( request, action_factory=lambda cart, data:", "form_class(data=request.POST) if not form.is_valid(): return JsonResponse({'message': form.errors.as_json()}, status=403) cart =", "django.core.exceptions import ValidationError from django.views.decorators.csrf import csrf_exempt from cart.lib import", "request, action_factory=lambda cart, data: cart.remove(**data), form_class=SelectProductForm, message=ugettext('Product removed from cart')", "JsonResponse({ 'message': message, 'result': result, 'total': cart.printable_total }) def add(request):", ") def get_modal(request): cart = get_cart(request) return render(request, 'cart/modal.html', {'cart':", "import get_cart from cart.forms import SelectProductForm, SetQtyForm @require_POST def _cart_action_view(request,", "render from django.core.exceptions import ValidationError from django.views.decorators.csrf import csrf_exempt from", "from django.views.decorators.csrf import csrf_exempt from cart.lib import get_cart from cart.forms", "', '.join(e.messages)}, status=403) return JsonResponse({ 'message': message, 'result': result, 'total':", "SelectProductForm, SetQtyForm @require_POST def _cart_action_view(request, action_factory, form_class, message): form =", "= action_factory(cart, form.cleaned_data) except ValidationError as e: return JsonResponse({'message': ',", "SetQtyForm @require_POST def _cart_action_view(request, action_factory, form_class, message): form = form_class(data=request.POST)", "_cart_action_view( request, action_factory=lambda cart, data: cart.set_qty(**data), form_class=SetQtyForm, message=ugettext('Quantity updated') )", "except ValidationError as e: return JsonResponse({'message': ', '.join(e.messages)}, status=403) return", "cart.remove(**data), form_class=SelectProductForm, message=ugettext('Product removed from cart') ) def get_modal(request): cart", "to cart') ) def remove(request): return _cart_action_view( request, action_factory=lambda cart,", "'.join(e.messages)}, 
status=403) return JsonResponse({ 'message': message, 'result': result, 'total': cart.printable_total", "removed from cart') ) def get_modal(request): cart = get_cart(request) return", "'total': cart.printable_total }) def add(request): return _cart_action_view( request, action_factory=lambda cart,", "return render(request, 'cart/modal.html', {'cart': cart}) @csrf_exempt def set_qty(request): return _cart_action_view(", "JsonResponse({'message': form.errors.as_json()}, status=403) cart = get_cart(request) try: result = action_factory(cart,", "message, 'result': result, 'total': cart.printable_total }) def add(request): return _cart_action_view(", "get_cart(request) return render(request, 'cart/modal.html', {'cart': cart}) @csrf_exempt def set_qty(request): return", "render(request, 'cart/modal.html', {'cart': cart}) @csrf_exempt def set_qty(request): return _cart_action_view( request,", "django.shortcuts import render from django.core.exceptions import ValidationError from django.views.decorators.csrf import", ") def remove(request): return _cart_action_view( request, action_factory=lambda cart, data: cart.remove(**data),", "ValidationError from django.views.decorators.csrf import csrf_exempt from cart.lib import get_cart from", "from django.views.decorators.http import require_POST from django.http import JsonResponse from django.shortcuts", "<gh_stars>1-10 from django.utils.translation import ugettext from django.views.decorators.http import require_POST from", "get_cart(request) try: result = action_factory(cart, form.cleaned_data) except ValidationError as e:", "from cart.lib import get_cart from cart.forms import SelectProductForm, SetQtyForm @require_POST", "form_class=SelectProductForm, message=ugettext('Product removed from cart') ) def get_modal(request): cart =", "form_class, message): form = form_class(data=request.POST) if not form.is_valid(): return JsonResponse({'message':", "action_factory=lambda cart, data: cart.remove(**data), 
form_class=SelectProductForm, message=ugettext('Product removed from cart') )", "form = form_class(data=request.POST) if not form.is_valid(): return JsonResponse({'message': form.errors.as_json()}, status=403)", "= get_cart(request) return render(request, 'cart/modal.html', {'cart': cart}) @csrf_exempt def set_qty(request):", "cart = get_cart(request) try: result = action_factory(cart, form.cleaned_data) except ValidationError", "added to cart') ) def remove(request): return _cart_action_view( request, action_factory=lambda", "from cart') ) def get_modal(request): cart = get_cart(request) return render(request,", "'message': message, 'result': result, 'total': cart.printable_total }) def add(request): return", "import JsonResponse from django.shortcuts import render from django.core.exceptions import ValidationError", "status=403) return JsonResponse({ 'message': message, 'result': result, 'total': cart.printable_total })", "django.http import JsonResponse from django.shortcuts import render from django.core.exceptions import", "django.views.decorators.csrf import csrf_exempt from cart.lib import get_cart from cart.forms import", "= form_class(data=request.POST) if not form.is_valid(): return JsonResponse({'message': form.errors.as_json()}, status=403) cart", "return _cart_action_view( request, action_factory=lambda cart, data: cart.remove(**data), form_class=SelectProductForm, message=ugettext('Product removed", "form_class=SelectProductForm, message=ugettext('Product added to cart') ) def remove(request): return _cart_action_view(", "import SelectProductForm, SetQtyForm @require_POST def _cart_action_view(request, action_factory, form_class, message): form", "from cart.forms import SelectProductForm, SetQtyForm @require_POST def _cart_action_view(request, action_factory, form_class,", "action_factory(cart, form.cleaned_data) except ValidationError as e: return JsonResponse({'message': ', '.join(e.messages)},", "JsonResponse from django.shortcuts import render 
from django.core.exceptions import ValidationError from", "_cart_action_view(request, action_factory, form_class, message): form = form_class(data=request.POST) if not form.is_valid():", "def get_modal(request): cart = get_cart(request) return render(request, 'cart/modal.html', {'cart': cart})", "get_cart from cart.forms import SelectProductForm, SetQtyForm @require_POST def _cart_action_view(request, action_factory," ]
[ "F: color of moved piece (0-1) # G: promotion figure", "= 5 KING = 6 FIGURE_NAME = [ \"\", \"pawn\",", "FIGURE_NAME = [ \"\", \"pawn\", \"knight\", \"bishop\", \"rook\", \"queen\", \"king\"", "ASCII_FIG[BLACK] = [ 'x', chr(9823), chr(9822), chr(9821), chr(9820), chr(9819), chr(9818)]", "(9, 6) MOVE_FIG_START = (15, 3) MOVE_FIG_CAPTURE = (18, 3)", "onTurnLabel PLAYER_COLOR = [\"white\", \"black\"] #figures PAWN = 1 KNIGHT", "F7, G7, H7, \\ A8, B8, C8, D8, E8, F8,", "F2, G2, H2, \\ A3, B3, C3, D3, E3, F3,", "xxxxxx xxxxxx xxx # G F E D C B", "xxx # G F E D C B A #", "5 KING = 6 FIGURE_NAME = [ \"\", \"pawn\", \"knight\",", "sq (0-63) # C: destination sq (0-63) # D: start", "(0, 3) MOVE_START = (3, 6) MOVE_DEST = (9, 6)", "G, H = range(8) #all squares A1, B1, C1, D1,", "[ 'x', chr(9817), chr(9816), chr(9815), chr(9814), chr(9813), chr(9812)] ASCII_FIG[BLACK] =", "CAPTURE, PROMOTION, DOUBLE_STEP, ENPASSANT_CAPTURE, CASTLING, KING_CAPTURE = range(7) #----- move", "(0-3) #NAME = (start_bit, lenght) MOVE_TYPE = (0, 3) MOVE_START", "DOUBLE_STEP, ENPASSANT_CAPTURE, CASTLING, KING_CAPTURE = range(7) #----- move 32bit reservation", "MOVE_COLOR = (21, 1) MOVE_PROM = (22, 2) #----- castling", "PICKING = 1 INF = 1000000 ASCII_FIG = [[],[]] ASCII_FIG[WHITE]", "player status -----# IDELING = 0 PICKING = 1 INF", "E: captured figure (1-6) # F: color of moved piece", "B6, C6, D6, E6, F6, G6, H6, \\ A7, B7,", "BOTH = 2 #color for onTurnLabel PLAYER_COLOR = [\"white\", \"black\"]", "= range(64) #----- game display constants -----# DEFAULTBORDERWIDTH = 20", "BISHOP = 3 ROOK = 4 QUEEN = 5 KING", "15) COLORS = { \"bg\":\"#EDC08C\", \"border\":\"#B55602\", \"tiles\":(\"#FC9235\", \"#FFB87A\") } #-----", "(3, 6) MOVE_DEST = (9, 6) MOVE_FIG_START = (15, 3)", "figure (0-3) #NAME = (start_bit, lenght) MOVE_TYPE = (0, 3)", "= 1 #----- player status -----# IDELING = 0 PICKING", "E6, F6, G6, H6, \\ A7, B7, C7, D7, E7,", "G1, H1, \\ A2, B2, C2, D2, E2, F2, G2,", "D3, E3, F3, G3, H3, \\ A4, B4, 
C4, D4,", "lines A, B, C, D, E, F, G, H =", "C2, D2, E2, F2, G2, H2, \\ A3, B3, C3,", "PROM_QUEEN = 3 #all lines A, B, C, D, E,", "CASTLING_RIGHT = 1 #----- player status -----# IDELING = 0", "MOVE_FIG_START = (15, 3) MOVE_FIG_CAPTURE = (18, 3) MOVE_COLOR =", "NORMAL_MOVE, CAPTURE, PROMOTION, DOUBLE_STEP, ENPASSANT_CAPTURE, CASTLING, KING_CAPTURE = range(7) #-----", "20 DEFAULTTILEWIDTH = 45 DEFAULTFONTSIZE = (7, 15) COLORS =", "type (0-6) # B: start sq (0-63) # C: destination", "6) MOVE_DEST = (9, 6) MOVE_FIG_START = (15, 3) MOVE_FIG_CAPTURE", "# F: color of moved piece (0-1) # G: promotion", "stored in 32 bit as follows # xxxxxxxx xx x", "H5, \\ A6, B6, C6, D6, E6, F6, G6, H6,", "PAWN = 1 KNIGHT = 2 BISHOP = 3 ROOK", "1 #----- player status -----# IDELING = 0 PICKING =", "F4, G4, H4, \\ A5, B5, C5, D5, E5, F5,", "in 32 bit as follows # xxxxxxxx xx x xxx", "#figures PAWN = 1 KNIGHT = 2 BISHOP = 3", "\\ A5, B5, C5, D5, E5, F5, G5, H5, \\", "xxx xxxxxx xxxxxx xxx # G F E D C", "D5, E5, F5, G5, H5, \\ A6, B6, C6, D6,", "BLACK = 1 BOTH = 2 #color for onTurnLabel PLAYER_COLOR", "= 3 ROOK = 4 QUEEN = 5 KING =", "figure prom_figure = figure-2 PROM_KNIGHT = 0 PROM_BISHOP = 1", "D8, E8, F8, G8, H8 = range(64) #----- game display", "move is stored in 32 bit as follows # xxxxxxxx", "move type (0-6) # B: start sq (0-63) # C:", "F3, G3, H3, \\ A4, B4, C4, D4, E4, F4,", "#all lines A, B, C, D, E, F, G, H", "xxxxxxxx xx x xxx xxx xxxxxx xxxxxx xxx # G", "# a single move is stored in 32 bit as", "[ \"\", \"pawn\", \"knight\", \"bishop\", \"rook\", \"queen\", \"king\" ] #used", "(0-1) # G: promotion figure (0-3) #NAME = (start_bit, lenght)", "= figure-2 PROM_KNIGHT = 0 PROM_BISHOP = 1 PROM_ROOK =", "MOVE_TYPE = (0, 3) MOVE_START = (3, 6) MOVE_DEST =", "ASCII_FIG = [[],[]] ASCII_FIG[WHITE] = [ 'x', chr(9817), chr(9816), chr(9815),", "C, D, E, F, G, H = range(8) #all squares", "32 bit as follows # xxxxxxxx xx x xxx xxx", "\\ A6, B6, C6, D6, E6, F6, G6, H6, \\", "A6, B6, C6, D6, E6, 
F6, G6, H6, \\ A7,", "xxxxxx xxx # G F E D C B A", "H8 = range(64) #----- game display constants -----# DEFAULTBORDERWIDTH =", "IDELING = 0 PICKING = 1 INF = 1000000 ASCII_FIG", "E2, F2, G2, H2, \\ A3, B3, C3, D3, E3,", "-----# CASTLING_LEFT = 0 CASTLING_RIGHT = 1 #----- player status", "\"knight\", \"bishop\", \"rook\", \"queen\", \"king\" ] #used in move 32bit", "{ \"bg\":\"#EDC08C\", \"border\":\"#B55602\", \"tiles\":(\"#FC9235\", \"#FFB87A\") } #----- move types -----#", "= 1 BOTH = 2 #color for onTurnLabel PLAYER_COLOR =", "= [ \"\", \"pawn\", \"knight\", \"bishop\", \"rook\", \"queen\", \"king\" ]", "#all squares A1, B1, C1, D1, E1, F1, G1, H1,", "types -----# NORMAL_MOVE, CAPTURE, PROMOTION, DOUBLE_STEP, ENPASSANT_CAPTURE, CASTLING, KING_CAPTURE =", "G2, H2, \\ A3, B3, C3, D3, E3, F3, G3,", "(0-63) # C: destination sq (0-63) # D: start figure", "#color for onTurnLabel PLAYER_COLOR = [\"white\", \"black\"] #figures PAWN =", "D6, E6, F6, G6, H6, \\ A7, B7, C7, D7,", "#----- castling -----# CASTLING_LEFT = 0 CASTLING_RIGHT = 1 #-----", "(start_bit, lenght) MOVE_TYPE = (0, 3) MOVE_START = (3, 6)", "E5, F5, G5, H5, \\ A6, B6, C6, D6, E6,", "= 45 DEFAULTFONTSIZE = (7, 15) COLORS = { \"bg\":\"#EDC08C\",", "D: start figure (1-6) # E: captured figure (1-6) #", "C7, D7, E7, F7, G7, H7, \\ A8, B8, C8,", "chr(9816), chr(9815), chr(9814), chr(9813), chr(9812)] ASCII_FIG[BLACK] = [ 'x', chr(9823),", "for promotion figure prom_figure = figure-2 PROM_KNIGHT = 0 PROM_BISHOP", "\"tiles\":(\"#FC9235\", \"#FFB87A\") } #----- move types -----# NORMAL_MOVE, CAPTURE, PROMOTION,", "COLORS = { \"bg\":\"#EDC08C\", \"border\":\"#B55602\", \"tiles\":(\"#FC9235\", \"#FFB87A\") } #----- move", "[[],[]] ASCII_FIG[WHITE] = [ 'x', chr(9817), chr(9816), chr(9815), chr(9814), chr(9813),", "= 0 PICKING = 1 INF = 1000000 ASCII_FIG =", "constants -----# DEFAULTBORDERWIDTH = 20 DEFAULTTILEWIDTH = 45 DEFAULTFONTSIZE =", "prom_figure = figure-2 PROM_KNIGHT = 0 PROM_BISHOP = 1 PROM_ROOK", "= { 
\"bg\":\"#EDC08C\", \"border\":\"#B55602\", \"tiles\":(\"#FC9235\", \"#FFB87A\") } #----- move types", "B: start sq (0-63) # C: destination sq (0-63) #", "= (7, 15) COLORS = { \"bg\":\"#EDC08C\", \"border\":\"#B55602\", \"tiles\":(\"#FC9235\", \"#FFB87A\")", "(15, 3) MOVE_FIG_CAPTURE = (18, 3) MOVE_COLOR = (21, 1)", "C8, D8, E8, F8, G8, H8 = range(64) #----- game", "move 32bit reservation -----# # a single move is stored", "H = range(8) #all squares A1, B1, C1, D1, E1,", "1 INF = 1000000 ASCII_FIG = [[],[]] ASCII_FIG[WHITE] = [", "= 1 PROM_ROOK = 2 PROM_QUEEN = 3 #all lines", "#------ game constants -----# #players WHITE = 0 BLACK =", "} #----- move types -----# NORMAL_MOVE, CAPTURE, PROMOTION, DOUBLE_STEP, ENPASSANT_CAPTURE,", "ENPASSANT_CAPTURE, CASTLING, KING_CAPTURE = range(7) #----- move 32bit reservation -----#", "0 BLACK = 1 BOTH = 2 #color for onTurnLabel", "H4, \\ A5, B5, C5, D5, E5, F5, G5, H5,", "= 2 BISHOP = 3 ROOK = 4 QUEEN =", "= (22, 2) #----- castling -----# CASTLING_LEFT = 0 CASTLING_RIGHT", "0 PICKING = 1 INF = 1000000 ASCII_FIG = [[],[]]", "chr(9812)] ASCII_FIG[BLACK] = [ 'x', chr(9823), chr(9822), chr(9821), chr(9820), chr(9819),", "E3, F3, G3, H3, \\ A4, B4, C4, D4, E4,", "H7, \\ A8, B8, C8, D8, E8, F8, G8, H8", "a single move is stored in 32 bit as follows", "D C B A # # A: move type (0-6)", "figure (1-6) # F: color of moved piece (0-1) #", "F, G, H = range(8) #all squares A1, B1, C1,", "[ 'x', chr(9823), chr(9822), chr(9821), chr(9820), chr(9819), chr(9818)] #AI constants", "-----# #players WHITE = 0 BLACK = 1 BOTH =", "squares A1, B1, C1, D1, E1, F1, G1, H1, \\", "H1, \\ A2, B2, C2, D2, E2, F2, G2, H2,", "PROM_KNIGHT = 0 PROM_BISHOP = 1 PROM_ROOK = 2 PROM_QUEEN", "G4, H4, \\ A5, B5, C5, D5, E5, F5, G5,", "= (0, 3) MOVE_START = (3, 6) MOVE_DEST = (9,", "sq (0-63) # D: start figure (1-6) # E: captured", "H3, \\ A4, B4, C4, D4, E4, F4, G4, H4,", "destination sq (0-63) # D: start figure (1-6) # E:", "G: promotion figure (0-3) #NAME = (start_bit, 
lenght) MOVE_TYPE =", "= (3, 6) MOVE_DEST = (9, 6) MOVE_FIG_START = (15,", "xxx xxx xxxxxx xxxxxx xxx # G F E D", "moved piece (0-1) # G: promotion figure (0-3) #NAME =", "= [ 'x', chr(9823), chr(9822), chr(9821), chr(9820), chr(9819), chr(9818)] #AI", "32bit for promotion figure prom_figure = figure-2 PROM_KNIGHT = 0", "2) #----- castling -----# CASTLING_LEFT = 0 CASTLING_RIGHT = 1", "(21, 1) MOVE_PROM = (22, 2) #----- castling -----# CASTLING_LEFT", "C3, D3, E3, F3, G3, H3, \\ A4, B4, C4,", "= [\"white\", \"black\"] #figures PAWN = 1 KNIGHT = 2", "C6, D6, E6, F6, G6, H6, \\ A7, B7, C7,", "reservation -----# # a single move is stored in 32", "F1, G1, H1, \\ A2, B2, C2, D2, E2, F2,", "= (start_bit, lenght) MOVE_TYPE = (0, 3) MOVE_START = (3,", "\"queen\", \"king\" ] #used in move 32bit for promotion figure", "constants -----# #players WHITE = 0 BLACK = 1 BOTH", "D4, E4, F4, G4, H4, \\ A5, B5, C5, D5,", "(22, 2) #----- castling -----# CASTLING_LEFT = 0 CASTLING_RIGHT =", "G6, H6, \\ A7, B7, C7, D7, E7, F7, G7,", "\"bg\":\"#EDC08C\", \"border\":\"#B55602\", \"tiles\":(\"#FC9235\", \"#FFB87A\") } #----- move types -----# NORMAL_MOVE,", "MOVE_START = (3, 6) MOVE_DEST = (9, 6) MOVE_FIG_START =", "= (21, 1) MOVE_PROM = (22, 2) #----- castling -----#", "F6, G6, H6, \\ A7, B7, C7, D7, E7, F7,", "(0-6) # B: start sq (0-63) # C: destination sq", "= (18, 3) MOVE_COLOR = (21, 1) MOVE_PROM = (22,", "#----- move 32bit reservation -----# # a single move is", "start sq (0-63) # C: destination sq (0-63) # D:", "promotion figure prom_figure = figure-2 PROM_KNIGHT = 0 PROM_BISHOP =", "= (15, 3) MOVE_FIG_CAPTURE = (18, 3) MOVE_COLOR = (21,", "piece (0-1) # G: promotion figure (0-3) #NAME = (start_bit,", "\"bishop\", \"rook\", \"queen\", \"king\" ] #used in move 32bit for", "C5, D5, E5, F5, G5, H5, \\ A6, B6, C6,", "single move is stored in 32 bit as follows #", "in move 32bit for promotion figure prom_figure = figure-2 PROM_KNIGHT", "E8, F8, G8, H8 = range(64) #----- game display 
constants", "[\"white\", \"black\"] #figures PAWN = 1 KNIGHT = 2 BISHOP", "3) MOVE_START = (3, 6) MOVE_DEST = (9, 6) MOVE_FIG_START", "= 20 DEFAULTTILEWIDTH = 45 DEFAULTFONTSIZE = (7, 15) COLORS", "1 KNIGHT = 2 BISHOP = 3 ROOK = 4", "of moved piece (0-1) # G: promotion figure (0-3) #NAME", "\\ A7, B7, C7, D7, E7, F7, G7, H7, \\", "] #used in move 32bit for promotion figure prom_figure =", "captured figure (1-6) # F: color of moved piece (0-1)", "4 QUEEN = 5 KING = 6 FIGURE_NAME = [", "G3, H3, \\ A4, B4, C4, D4, E4, F4, G4,", "chr(9823), chr(9822), chr(9821), chr(9820), chr(9819), chr(9818)] #AI constants CASTLING_RIGHT_LOSS_PENALTY =", "WHITE = 0 BLACK = 1 BOTH = 2 #color", "promotion figure (0-3) #NAME = (start_bit, lenght) MOVE_TYPE = (0,", "D1, E1, F1, G1, H1, \\ A2, B2, C2, D2,", "as follows # xxxxxxxx xx x xxx xxx xxxxxx xxxxxx", "2 PROM_QUEEN = 3 #all lines A, B, C, D,", "\"\", \"pawn\", \"knight\", \"bishop\", \"rook\", \"queen\", \"king\" ] #used in", "move types -----# NORMAL_MOVE, CAPTURE, PROMOTION, DOUBLE_STEP, ENPASSANT_CAPTURE, CASTLING, KING_CAPTURE", "= 2 PROM_QUEEN = 3 #all lines A, B, C,", "\\ A4, B4, C4, D4, E4, F4, G4, H4, \\", "xx x xxx xxx xxxxxx xxxxxx xxx # G F", "6 FIGURE_NAME = [ \"\", \"pawn\", \"knight\", \"bishop\", \"rook\", \"queen\",", "C B A # # A: move type (0-6) #", "PROM_BISHOP = 1 PROM_ROOK = 2 PROM_QUEEN = 3 #all", "MOVE_FIG_CAPTURE = (18, 3) MOVE_COLOR = (21, 1) MOVE_PROM =", "D2, E2, F2, G2, H2, \\ A3, B3, C3, D3,", "6) MOVE_FIG_START = (15, 3) MOVE_FIG_CAPTURE = (18, 3) MOVE_COLOR", "1) MOVE_PROM = (22, 2) #----- castling -----# CASTLING_LEFT =", "= 4 QUEEN = 5 KING = 6 FIGURE_NAME =", "B2, C2, D2, E2, F2, G2, H2, \\ A3, B3,", "= 1 KNIGHT = 2 BISHOP = 3 ROOK =", "F5, G5, H5, \\ A6, B6, C6, D6, E6, F6,", "\"rook\", \"queen\", \"king\" ] #used in move 32bit for promotion", "A1, B1, C1, D1, E1, F1, G1, H1, \\ A2,", "\\ A8, B8, C8, D8, E8, F8, G8, H8 =", "MOVE_DEST = (9, 6) MOVE_FIG_START = (15, 3) MOVE_FIG_CAPTURE =", "= 6 
FIGURE_NAME = [ \"\", \"pawn\", \"knight\", \"bishop\", \"rook\",", "B8, C8, D8, E8, F8, G8, H8 = range(64) #-----", "= 0 PROM_BISHOP = 1 PROM_ROOK = 2 PROM_QUEEN =", "1 BOTH = 2 #color for onTurnLabel PLAYER_COLOR = [\"white\",", "#players WHITE = 0 BLACK = 1 BOTH = 2", "E D C B A # # A: move type", "chr(9813), chr(9812)] ASCII_FIG[BLACK] = [ 'x', chr(9823), chr(9822), chr(9821), chr(9820),", "H6, \\ A7, B7, C7, D7, E7, F7, G7, H7,", "DEFAULTTILEWIDTH = 45 DEFAULTFONTSIZE = (7, 15) COLORS = {", "DEFAULTFONTSIZE = (7, 15) COLORS = { \"bg\":\"#EDC08C\", \"border\":\"#B55602\", \"tiles\":(\"#FC9235\",", "INF = 1000000 ASCII_FIG = [[],[]] ASCII_FIG[WHITE] = [ 'x',", "= 1 INF = 1000000 ASCII_FIG = [[],[]] ASCII_FIG[WHITE] =", "F8, G8, H8 = range(64) #----- game display constants -----#", "CASTLING, KING_CAPTURE = range(7) #----- move 32bit reservation -----# #", "\"pawn\", \"knight\", \"bishop\", \"rook\", \"queen\", \"king\" ] #used in move", "C: destination sq (0-63) # D: start figure (1-6) #", "= 3 #all lines A, B, C, D, E, F,", "'x', chr(9817), chr(9816), chr(9815), chr(9814), chr(9813), chr(9812)] ASCII_FIG[BLACK] = [", "\\ A2, B2, C2, D2, E2, F2, G2, H2, \\", "C1, D1, E1, F1, G1, H1, \\ A2, B2, C2,", "# D: start figure (1-6) # E: captured figure (1-6)", "#used in move 32bit for promotion figure prom_figure = figure-2", "= [[],[]] ASCII_FIG[WHITE] = [ 'x', chr(9817), chr(9816), chr(9815), chr(9814),", "A4, B4, C4, D4, E4, F4, G4, H4, \\ A5,", "A7, B7, C7, D7, E7, F7, G7, H7, \\ A8,", "B7, C7, D7, E7, F7, G7, H7, \\ A8, B8,", "A5, B5, C5, D5, E5, F5, G5, H5, \\ A6,", "ROOK = 4 QUEEN = 5 KING = 6 FIGURE_NAME", "KING = 6 FIGURE_NAME = [ \"\", \"pawn\", \"knight\", \"bishop\",", "A3, B3, C3, D3, E3, F3, G3, H3, \\ A4,", "#----- move types -----# NORMAL_MOVE, CAPTURE, PROMOTION, DOUBLE_STEP, ENPASSANT_CAPTURE, CASTLING,", "MOVE_PROM = (22, 2) #----- castling -----# CASTLING_LEFT = 0", "bit as follows # xxxxxxxx xx x xxx xxx xxxxxx", "= [ 'x', chr(9817), chr(9816), 
chr(9815), chr(9814), chr(9813), chr(9812)] ASCII_FIG[BLACK]", "H2, \\ A3, B3, C3, D3, E3, F3, G3, H3,", "figure-2 PROM_KNIGHT = 0 PROM_BISHOP = 1 PROM_ROOK = 2", "B3, C3, D3, E3, F3, G3, H3, \\ A4, B4,", "-----# IDELING = 0 PICKING = 1 INF = 1000000", "= 1000000 ASCII_FIG = [[],[]] ASCII_FIG[WHITE] = [ 'x', chr(9817),", "(1-6) # E: captured figure (1-6) # F: color of", "lenght) MOVE_TYPE = (0, 3) MOVE_START = (3, 6) MOVE_DEST", "32bit reservation -----# # a single move is stored in", "figure (1-6) # E: captured figure (1-6) # F: color", "= 0 CASTLING_RIGHT = 1 #----- player status -----# IDELING", "start figure (1-6) # E: captured figure (1-6) # F:", "= (9, 6) MOVE_FIG_START = (15, 3) MOVE_FIG_CAPTURE = (18,", "= 0 BLACK = 1 BOTH = 2 #color for", "KNIGHT = 2 BISHOP = 3 ROOK = 4 QUEEN", "for onTurnLabel PLAYER_COLOR = [\"white\", \"black\"] #figures PAWN = 1", "3 ROOK = 4 QUEEN = 5 KING = 6", "1 PROM_ROOK = 2 PROM_QUEEN = 3 #all lines A,", "# # A: move type (0-6) # B: start sq", "= range(8) #all squares A1, B1, C1, D1, E1, F1,", "0 CASTLING_RIGHT = 1 #----- player status -----# IDELING =", "color of moved piece (0-1) # G: promotion figure (0-3)", "3 #all lines A, B, C, D, E, F, G,", "-----# # a single move is stored in 32 bit", "(18, 3) MOVE_COLOR = (21, 1) MOVE_PROM = (22, 2)", "3) MOVE_COLOR = (21, 1) MOVE_PROM = (22, 2) #-----", "B, C, D, E, F, G, H = range(8) #all", "B5, C5, D5, E5, F5, G5, H5, \\ A6, B6,", "C4, D4, E4, F4, G4, H4, \\ A5, B5, C5,", "(1-6) # F: color of moved piece (0-1) # G:", "is stored in 32 bit as follows # xxxxxxxx xx", "follows # xxxxxxxx xx x xxx xxx xxxxxx xxxxxx xxx", "D, E, F, G, H = range(8) #all squares A1,", "# G: promotion figure (0-3) #NAME = (start_bit, lenght) MOVE_TYPE", "# xxxxxxxx xx x xxx xxx xxxxxx xxxxxx xxx #", "ASCII_FIG[WHITE] = [ 'x', chr(9817), chr(9816), chr(9815), chr(9814), chr(9813), chr(9812)]", "QUEEN = 5 KING = 6 FIGURE_NAME = [ \"\",", "= range(7) #----- move 32bit reservation -----# # a single", "move 32bit 
for promotion figure prom_figure = figure-2 PROM_KNIGHT =", "\"border\":\"#B55602\", \"tiles\":(\"#FC9235\", \"#FFB87A\") } #----- move types -----# NORMAL_MOVE, CAPTURE,", "E1, F1, G1, H1, \\ A2, B2, C2, D2, E2,", "#----- player status -----# IDELING = 0 PICKING = 1", "PROMOTION, DOUBLE_STEP, ENPASSANT_CAPTURE, CASTLING, KING_CAPTURE = range(7) #----- move 32bit", "'x', chr(9823), chr(9822), chr(9821), chr(9820), chr(9819), chr(9818)] #AI constants CASTLING_RIGHT_LOSS_PENALTY", "B1, C1, D1, E1, F1, G1, H1, \\ A2, B2,", "chr(9815), chr(9814), chr(9813), chr(9812)] ASCII_FIG[BLACK] = [ 'x', chr(9823), chr(9822),", "# A: move type (0-6) # B: start sq (0-63)", "KING_CAPTURE = range(7) #----- move 32bit reservation -----# # a", "A2, B2, C2, D2, E2, F2, G2, H2, \\ A3,", "G7, H7, \\ A8, B8, C8, D8, E8, F8, G8,", "0 PROM_BISHOP = 1 PROM_ROOK = 2 PROM_QUEEN = 3", "x xxx xxx xxxxxx xxxxxx xxx # G F E", "1000000 ASCII_FIG = [[],[]] ASCII_FIG[WHITE] = [ 'x', chr(9817), chr(9816),", "PROM_ROOK = 2 PROM_QUEEN = 3 #all lines A, B,", "(7, 15) COLORS = { \"bg\":\"#EDC08C\", \"border\":\"#B55602\", \"tiles\":(\"#FC9235\", \"#FFB87A\") }", "D7, E7, F7, G7, H7, \\ A8, B8, C8, D8,", "# C: destination sq (0-63) # D: start figure (1-6)", "game display constants -----# DEFAULTBORDERWIDTH = 20 DEFAULTTILEWIDTH = 45", "= 2 #color for onTurnLabel PLAYER_COLOR = [\"white\", \"black\"] #figures", "# B: start sq (0-63) # C: destination sq (0-63)", "E7, F7, G7, H7, \\ A8, B8, C8, D8, E8,", "(0-63) # D: start figure (1-6) # E: captured figure", "# G F E D C B A # #", "G8, H8 = range(64) #----- game display constants -----# DEFAULTBORDERWIDTH", "chr(9817), chr(9816), chr(9815), chr(9814), chr(9813), chr(9812)] ASCII_FIG[BLACK] = [ 'x',", "\\ A3, B3, C3, D3, E3, F3, G3, H3, \\", "range(8) #all squares A1, B1, C1, D1, E1, F1, G1,", "game constants -----# #players WHITE = 0 BLACK = 1", "\"king\" ] #used in move 32bit for promotion figure prom_figure", "E4, F4, G4, H4, \\ A5, B5, C5, D5, E5,", 
"#----- game display constants -----# DEFAULTBORDERWIDTH = 20 DEFAULTTILEWIDTH =", "-----# NORMAL_MOVE, CAPTURE, PROMOTION, DOUBLE_STEP, ENPASSANT_CAPTURE, CASTLING, KING_CAPTURE = range(7)", "F E D C B A # # A: move", "#NAME = (start_bit, lenght) MOVE_TYPE = (0, 3) MOVE_START =", "range(7) #----- move 32bit reservation -----# # a single move", "B A # # A: move type (0-6) # B:", "G F E D C B A # # A:", "A # # A: move type (0-6) # B: start", "G5, H5, \\ A6, B6, C6, D6, E6, F6, G6,", "PLAYER_COLOR = [\"white\", \"black\"] #figures PAWN = 1 KNIGHT =", "DEFAULTBORDERWIDTH = 20 DEFAULTTILEWIDTH = 45 DEFAULTFONTSIZE = (7, 15)", "45 DEFAULTFONTSIZE = (7, 15) COLORS = { \"bg\":\"#EDC08C\", \"border\":\"#B55602\",", "E, F, G, H = range(8) #all squares A1, B1,", "display constants -----# DEFAULTBORDERWIDTH = 20 DEFAULTTILEWIDTH = 45 DEFAULTFONTSIZE", "chr(9822), chr(9821), chr(9820), chr(9819), chr(9818)] #AI constants CASTLING_RIGHT_LOSS_PENALTY = -40", "A, B, C, D, E, F, G, H = range(8)", "2 BISHOP = 3 ROOK = 4 QUEEN = 5", "# E: captured figure (1-6) # F: color of moved", "range(64) #----- game display constants -----# DEFAULTBORDERWIDTH = 20 DEFAULTTILEWIDTH", "status -----# IDELING = 0 PICKING = 1 INF =", "A8, B8, C8, D8, E8, F8, G8, H8 = range(64)", "CASTLING_LEFT = 0 CASTLING_RIGHT = 1 #----- player status -----#", "B4, C4, D4, E4, F4, G4, H4, \\ A5, B5,", "castling -----# CASTLING_LEFT = 0 CASTLING_RIGHT = 1 #----- player", "A: move type (0-6) # B: start sq (0-63) #", "\"#FFB87A\") } #----- move types -----# NORMAL_MOVE, CAPTURE, PROMOTION, DOUBLE_STEP,", "-----# DEFAULTBORDERWIDTH = 20 DEFAULTTILEWIDTH = 45 DEFAULTFONTSIZE = (7,", "2 #color for onTurnLabel PLAYER_COLOR = [\"white\", \"black\"] #figures PAWN", "3) MOVE_FIG_CAPTURE = (18, 3) MOVE_COLOR = (21, 1) MOVE_PROM", "chr(9814), chr(9813), chr(9812)] ASCII_FIG[BLACK] = [ 'x', chr(9823), chr(9822), chr(9821),", "\"black\"] #figures PAWN = 1 KNIGHT = 2 BISHOP =" ]
[ "random if __name__ == \"__main__\": env = gym.make(\"Pokemon-v0\") total_reward =", "= env.reset() while True: action = random.randint(-1,8) obs, reward, done,", "steps, total reward of %.2f\" % (total_steps, total_reward)) if done:", "= 0.0 total_steps = 0 obs = env.reset() while True:", "total_steps = 0 obs = env.reset() while True: action =", "== \"__main__\": env = gym.make(\"Pokemon-v0\") total_reward = 0.0 total_steps =", "= gym.make(\"Pokemon-v0\") total_reward = 0.0 total_steps = 0 obs =", "obs, reward, done, _ = env.step(action) total_reward += reward total_steps", "%d steps, total reward of %.2f\" % (total_steps, total_reward)) if", "if __name__ == \"__main__\": env = gym.make(\"Pokemon-v0\") total_reward = 0.0", "\"__main__\": env = gym.make(\"Pokemon-v0\") total_reward = 0.0 total_steps = 0", "+= reward total_steps += 1 print(\"Currently %d steps, total reward", "__name__ == \"__main__\": env = gym.make(\"Pokemon-v0\") total_reward = 0.0 total_steps", "reward total_steps += 1 print(\"Currently %d steps, total reward of", "import random if __name__ == \"__main__\": env = gym.make(\"Pokemon-v0\") total_reward", "0 obs = env.reset() while True: action = random.randint(-1,8) obs,", "gym_pokemon import random if __name__ == \"__main__\": env = gym.make(\"Pokemon-v0\")", "env = gym.make(\"Pokemon-v0\") total_reward = 0.0 total_steps = 0 obs", "1 print(\"Currently %d steps, total reward of %.2f\" % (total_steps,", "while True: action = random.randint(-1,8) obs, reward, done, _ =", "action = random.randint(-1,8) obs, reward, done, _ = env.step(action) total_reward", "import gym_pokemon import random if __name__ == \"__main__\": env =", "gym.make(\"Pokemon-v0\") total_reward = 0.0 total_steps = 0 obs = env.reset()", "env.step(action) total_reward += reward total_steps += 1 print(\"Currently %d steps,", "= random.randint(-1,8) obs, reward, done, _ = env.step(action) total_reward +=", "random.randint(-1,8) obs, reward, done, _ = env.step(action) total_reward += 
reward", "print(\"Currently %d steps, total reward of %.2f\" % (total_steps, total_reward))", "done, _ = env.step(action) total_reward += reward total_steps += 1", "total reward of %.2f\" % (total_steps, total_reward)) if done: break", "= 0 obs = env.reset() while True: action = random.randint(-1,8)", "reward, done, _ = env.step(action) total_reward += reward total_steps +=", "0.0 total_steps = 0 obs = env.reset() while True: action", "+= 1 print(\"Currently %d steps, total reward of %.2f\" %", "gym import gym_pokemon import random if __name__ == \"__main__\": env", "import gym import gym_pokemon import random if __name__ == \"__main__\":", "total_steps += 1 print(\"Currently %d steps, total reward of %.2f\"", "total_reward += reward total_steps += 1 print(\"Currently %d steps, total", "<reponame>kapzlok2408/Pokemon-Showdown-Node-Bot<gh_stars>0 import gym import gym_pokemon import random if __name__ ==", "_ = env.step(action) total_reward += reward total_steps += 1 print(\"Currently", "env.reset() while True: action = random.randint(-1,8) obs, reward, done, _", "obs = env.reset() while True: action = random.randint(-1,8) obs, reward,", "= env.step(action) total_reward += reward total_steps += 1 print(\"Currently %d", "True: action = random.randint(-1,8) obs, reward, done, _ = env.step(action)", "total_reward = 0.0 total_steps = 0 obs = env.reset() while" ]
[ "print('O {} é maior que4 {}'.format(num2, num1)) else: print('Os números", "número: ')) if num1 > num2: print('O {} é maior", "<reponame>victor-da-costa/Aprendendo-Python<filename>Curso-Em-Video-Python/Mundo-2/EXs/EX038.py num1 = int(input('Digite o 1º número: ')) num2 =", "num2: print('O {} é maior que {}'.format(num1, num2)) elif num1", "> num2: print('O {} é maior que {}'.format(num1, num2)) elif", "num2 = int(input('Digite o 2º número: ')) if num1 >", "1º número: ')) num2 = int(input('Digite o 2º número: '))", "número: ')) num2 = int(input('Digite o 2º número: ')) if", "print('O {} é maior que {}'.format(num1, num2)) elif num1 <", "é maior que {}'.format(num1, num2)) elif num1 < num2: print('O", "< num2: print('O {} é maior que4 {}'.format(num2, num1)) else:", "é maior que4 {}'.format(num2, num1)) else: print('Os números são iguais')", "= int(input('Digite o 1º número: ')) num2 = int(input('Digite o", "= int(input('Digite o 2º número: ')) if num1 > num2:", "num2)) elif num1 < num2: print('O {} é maior que4", "{} é maior que4 {}'.format(num2, num1)) else: print('Os números são", "2º número: ')) if num1 > num2: print('O {} é", "int(input('Digite o 2º número: ')) if num1 > num2: print('O", "{} é maior que {}'.format(num1, num2)) elif num1 < num2:", "que {}'.format(num1, num2)) elif num1 < num2: print('O {} é", "{}'.format(num1, num2)) elif num1 < num2: print('O {} é maior", "if num1 > num2: print('O {} é maior que {}'.format(num1,", "num1 > num2: print('O {} é maior que {}'.format(num1, num2))", "')) num2 = int(input('Digite o 2º número: ')) if num1", "num2: print('O {} é maior que4 {}'.format(num2, num1)) else: print('Os", "num1 < num2: print('O {} é maior que4 {}'.format(num2, num1))", "maior que {}'.format(num1, num2)) elif num1 < num2: print('O {}", "int(input('Digite o 1º número: ')) num2 = int(input('Digite o 2º", "elif num1 < num2: print('O {} é maior que4 {}'.format(num2,", "o 2º número: ')) if num1 > num2: print('O {}", "o 1º número: ')) num2 = 
int(input('Digite o 2º número:", "')) if num1 > num2: print('O {} é maior que", "num1 = int(input('Digite o 1º número: ')) num2 = int(input('Digite" ]
[ "description=( 'Python file chooser widget for use in ' 'Jupyter/IPython", "setuptools import setup, find_packages def read(fname): \"\"\"Open files relative to", "from setuptools import setup, find_packages def read(fname): \"\"\"Open files relative", "- Beta', 'Intended Audience :: Developers', ], install_requires=[ 'ipywidgets' ]", "files relative to package.\"\"\" return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='ipyfilechooser', version='0.3.1',", "'License :: OSI Approved :: MIT License', 'Operating System ::", ":: OSI Approved :: MIT License', 'Operating System :: OS", "import setup, find_packages def read(fname): \"\"\"Open files relative to package.\"\"\"", "for use in ' 'Jupyter/IPython in conjunction with ipywidgets' ),", ":: 3', 'License :: OSI Approved :: MIT License', 'Operating", "relative to package.\"\"\" return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='ipyfilechooser', version='0.3.1', author='<NAME>", "classifiers=[ 'Programming Language :: Python :: 3', 'License :: OSI", "'Python file chooser widget for use in ' 'Jupyter/IPython in", "return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='ipyfilechooser', version='0.3.1', author='<NAME> (@crahan)', author_email='<EMAIL>', description=(", "import os from setuptools import setup, find_packages def read(fname): \"\"\"Open", "in ' 'Jupyter/IPython in conjunction with ipywidgets' ), long_description=read('README.md'), long_description_content_type='text/markdown',", "author='<NAME> (@crahan)', author_email='<EMAIL>', description=( 'Python file chooser widget for use", "os from setuptools import setup, find_packages def read(fname): \"\"\"Open files", "'Operating System :: OS Independent', 'Development Status :: 4 -", "\"\"\"Open files relative to package.\"\"\" return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='ipyfilechooser',", "' 'Jupyter/IPython in conjunction with ipywidgets' ), 
long_description=read('README.md'), long_description_content_type='text/markdown', url='https://github.com/crahan/ipyfilechooser',", "System :: OS Independent', 'Development Status :: 4 - Beta',", "name='ipyfilechooser', version='0.3.1', author='<NAME> (@crahan)', author_email='<EMAIL>', description=( 'Python file chooser widget", "'Programming Language :: Python :: 3', 'License :: OSI Approved", ":: MIT License', 'Operating System :: OS Independent', 'Development Status", "chooser widget for use in ' 'Jupyter/IPython in conjunction with", "4 - Beta', 'Intended Audience :: Developers', ], install_requires=[ 'ipywidgets'", "setup( name='ipyfilechooser', version='0.3.1', author='<NAME> (@crahan)', author_email='<EMAIL>', description=( 'Python file chooser", "OS Independent', 'Development Status :: 4 - Beta', 'Intended Audience", "Beta', 'Intended Audience :: Developers', ], install_requires=[ 'ipywidgets' ] )", "long_description=read('README.md'), long_description_content_type='text/markdown', url='https://github.com/crahan/ipyfilechooser', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python", ":: OS Independent', 'Development Status :: 4 - Beta', 'Intended", "(@crahan)', author_email='<EMAIL>', description=( 'Python file chooser widget for use in", "package.\"\"\" return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='ipyfilechooser', version='0.3.1', author='<NAME> (@crahan)', author_email='<EMAIL>',", "version='0.3.1', author='<NAME> (@crahan)', author_email='<EMAIL>', description=( 'Python file chooser widget for", "file chooser widget for use in ' 'Jupyter/IPython in conjunction", "Language :: Python :: 3', 'License :: OSI Approved ::", "python import os from setuptools import setup, find_packages def read(fname):", "setup, find_packages def read(fname): \"\"\"Open files relative to package.\"\"\" return", "find_packages def read(fname): \"\"\"Open files relative to package.\"\"\" return 
open(os.path.join(os.path.dirname(__file__),", "author_email='<EMAIL>', description=( 'Python file chooser widget for use in '", "with ipywidgets' ), long_description=read('README.md'), long_description_content_type='text/markdown', url='https://github.com/crahan/ipyfilechooser', license='MIT', packages=find_packages(), classifiers=[ 'Programming", "OSI Approved :: MIT License', 'Operating System :: OS Independent',", "Status :: 4 - Beta', 'Intended Audience :: Developers', ],", "url='https://github.com/crahan/ipyfilechooser', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python :: 3',", "'Jupyter/IPython in conjunction with ipywidgets' ), long_description=read('README.md'), long_description_content_type='text/markdown', url='https://github.com/crahan/ipyfilechooser', license='MIT',", "in conjunction with ipywidgets' ), long_description=read('README.md'), long_description_content_type='text/markdown', url='https://github.com/crahan/ipyfilechooser', license='MIT', packages=find_packages(),", "#!/usr/bin/env python import os from setuptools import setup, find_packages def", "open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='ipyfilechooser', version='0.3.1', author='<NAME> (@crahan)', author_email='<EMAIL>', description=( 'Python", "def read(fname): \"\"\"Open files relative to package.\"\"\" return open(os.path.join(os.path.dirname(__file__), fname)).read()", "read(fname): \"\"\"Open files relative to package.\"\"\" return open(os.path.join(os.path.dirname(__file__), fname)).read() setup(", "License', 'Operating System :: OS Independent', 'Development Status :: 4", "'Development Status :: 4 - Beta', 'Intended Audience :: Developers',", ":: 4 - Beta', 'Intended Audience :: Developers', ], install_requires=[", "widget for use in ' 'Jupyter/IPython in conjunction with ipywidgets'", "to package.\"\"\" return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='ipyfilechooser', version='0.3.1', 
author='<NAME> (@crahan)',", "packages=find_packages(), classifiers=[ 'Programming Language :: Python :: 3', 'License ::", ":: Python :: 3', 'License :: OSI Approved :: MIT", "license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python :: 3', 'License", "Python :: 3', 'License :: OSI Approved :: MIT License',", "Independent', 'Development Status :: 4 - Beta', 'Intended Audience ::", "MIT License', 'Operating System :: OS Independent', 'Development Status ::", "), long_description=read('README.md'), long_description_content_type='text/markdown', url='https://github.com/crahan/ipyfilechooser', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language ::", "conjunction with ipywidgets' ), long_description=read('README.md'), long_description_content_type='text/markdown', url='https://github.com/crahan/ipyfilechooser', license='MIT', packages=find_packages(), classifiers=[", "long_description_content_type='text/markdown', url='https://github.com/crahan/ipyfilechooser', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language :: Python ::", "Approved :: MIT License', 'Operating System :: OS Independent', 'Development", "ipywidgets' ), long_description=read('README.md'), long_description_content_type='text/markdown', url='https://github.com/crahan/ipyfilechooser', license='MIT', packages=find_packages(), classifiers=[ 'Programming Language", "3', 'License :: OSI Approved :: MIT License', 'Operating System", "fname)).read() setup( name='ipyfilechooser', version='0.3.1', author='<NAME> (@crahan)', author_email='<EMAIL>', description=( 'Python file", "use in ' 'Jupyter/IPython in conjunction with ipywidgets' ), long_description=read('README.md')," ]
[ "in q: if now_timestamp - sr.time_finished > datetime.timedelta(days=30): self.response.out.write(sr.name +", "not blob_info: self.error(404) return self.send_blob(blob_info) application = webapp.WSGIApplication( [('/', MainAction),", "self.response.out.write(json.dumps(json_data)) class SuppressionSummaryAction(MyRequestHandler): \"\"\"Displays summary information about memory suppressions.\"\"\" def", "= self.request.get('sort') query = app.MemorySuppressionSummary.all() monthly_timestamp = datetime.date.today().replace(day=1) query.filter('monthly_timestamp =',", "('/suppression_query', SuppressionQueryAction), ('/suppression_summary', SuppressionSummaryAction), ('/unused_suppressions', UnusedSuppressionsAction), ('/list', ListAction), ('/build_step_json', BuildStepJSONAction),", "# found in the LICENSE file. import appengine_config import datetime", "all processing to the # background. try: deferred.defer(app.process_status_push, self.request.body, _queue='fast')", "ViewRawLogAction)]) def main(): my_default_retry_params = cloudstorage.RetryParams( initial_delay=0.5, max_delay=30.0, backoff_factor=2, urlfetch_timeout=60)", "query = app.MemorySuppressionSummary.all() monthly_timestamp = datetime.date.today().replace(day=1) query.filter('monthly_timestamp =', monthly_timestamp) query.order('monthly_timestamp')", "{ 'gtest_query': self.request.get('gtest_query'), 'cursor': cursor, 'gtest_results': gtest_results, }) class SuppressionQueryAction(MyRequestHandler):", "query.filter('is_fetched =', True) query.filter('is_too_large =', False) deferred.defer(app.for_all_entities, query, app.update_parsed_data, None)", "(TypeError, ValueError), _: return default class MyRequestHandler(webapp.RequestHandler): \"\"\"Base request handler", "now_timestamp = datetime.datetime.now() queries = [] for line in self.request.body.splitlines():", "GTestQueryAction), ('/suppression_query', SuppressionQueryAction), ('/suppression_summary', SuppressionSummaryAction), 
('/unused_suppressions', UnusedSuppressionsAction), ('/list', ListAction), ('/build_step_json',", "space works. 'time_started': bs.time_started.isoformat(sep=' '), 'time_finished': bs.time_finished.isoformat(sep=' '), } for", "is governed by a BSD-style license that can be #", "bs.build_number, 'buildbot_root': bs.buildbot_root, 'builder': bs.builder, 'status': bs.status, 'step_number': bs.step_number, 'step_name':", "= datetime.date.today().replace(day=1) query.filter('monthly_timestamp =', monthly_timestamp) query.order('monthly_timestamp') query.order('-%s' % sort) if", "gtest_results = query.fetch(PAGE_SIZE) cursor = query.cursor() self._render_template('query.html', { 'gtest_query': self.request.get('gtest_query'),", "get(self): all_steps = app.BuildStep.all().order('-time_finished') if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) build_steps = all_steps.fetch(limit=1000)", "import db from google.appengine.ext import deferred from google.appengine.ext import webapp", "app.MemorySuppressionSummary.all() monthly_timestamp = datetime.date.today().replace(day=1) query.filter('monthly_timestamp =', monthly_timestamp) query.order('monthly_timestamp') query.order('-%s' %", "all_steps.fetch(limit=1000) json_data = { 'build_steps': [ { 'build_number': bs.build_number, 'buildbot_root':", "HTTP 500 # because buildbot will try again. 
app.process_status_push(self.request.body) class", "('/gtest_query', GTestQueryAction), ('/suppression_query', SuppressionQueryAction), ('/suppression_summary', SuppressionSummaryAction), ('/unused_suppressions', UnusedSuppressionsAction), ('/list', ListAction),", "UnusedSuppressionsAction(MyRequestHandler): def post(self): now_timestamp = datetime.datetime.now() queries = [] for", "# BigQuery doesn't recognize the T separator, but space works.", "'cursor': all_steps.cursor(), } self.response.out.write(json.dumps(json_data)) class SuppressionSummaryAction(MyRequestHandler): \"\"\"Displays summary information about", "many to display on one page.\"\"\" PAGE_SIZE = 100 def", "in build_steps ], 'cursor': all_steps.cursor(), } self.response.out.write(json.dumps(json_data)) class SuppressionSummaryAction(MyRequestHandler): \"\"\"Displays", "from google.appengine.ext import db from google.appengine.ext import deferred from google.appengine.ext", "For large requests we have to do it now. We", "self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_summaries = query.fetch(PAGE_SIZE) self._render_template('suppression_summary.html', { 'suppression_summary_query': self.request.get('suppression_summary_query'), 'suppression_summaries':", "get(self): query = app.BuildStep.all(keys_only=True) query.filter('is_fetched =', True) query.filter('is_too_large =', False)", "ViewRawLogAction(blobstore_handlers.BlobstoreDownloadHandler): \"\"\"Sends selected log file to the user.\"\"\" def get(self,", "from google.appengine.ext import webapp from google.appengine.ext.webapp import blobstore_handlers from google.appengine.ext.webapp", "code is governed by a BSD-style license that can be", "get(self): query = app.MemorySuppressionResult.all() query.filter('name =', self.request.get('suppression_query')) query.order('-time_finished') if self.request.get('cursor'):", "again. 
app.process_status_push(self.request.body) class FetchBuildersAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_builders) class FetchStepsAction(MyRequestHandler): def", "found in the LICENSE file. import appengine_config import datetime import", "deferred.defer(app.fetch_steps) class UpdateParsedDataAction(MyRequestHandler): def get(self): query = app.BuildStep.all(keys_only=True) query.filter('is_fetched =',", "2011 The Chromium Authors. All rights reserved. # Use of", "self.request.get('buildbot_root', ''), 'filter_builder': self.request.get('builder', ''), 'filter_step_name': self.request.get('step_name', ''), 'filter_status': self.request.get('status',", "governed by a BSD-style license that can be # found", "query.filter('monthly_timestamp =', monthly_timestamp) query.order('monthly_timestamp') query.order('-%s' % sort) if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor'))", "UnusedSuppressionsAction), ('/list', ListAction), ('/build_step_json', BuildStepJSONAction), ('/status_receiver', StatusReceiverAction), ('/tasks/fetch_builders', FetchBuildersAction), ('/tasks/fetch_steps',", "webapp from google.appengine.ext.webapp import blobstore_handlers from google.appengine.ext.webapp import template from", "Use of this source code is governed by a BSD-style", "doesn't get stuck on us. 
Defer all processing to the", "import json import logging import os.path import pickle import sys", "None) class MainAction(MyRequestHandler): def get(self): self._render_template('main.html', {}) class GTestQueryAction(MyRequestHandler): def", "None)) if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) steps = all_steps.fetch(limit=PAGE_SIZE) step_names = app.iterate_large_result(app.StepName.all().order('name'))", "query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) gtest_results = query.fetch(PAGE_SIZE) cursor = query.cursor()", "query.order('monthly_timestamp') query.order('-%s' % sort) if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_summaries = query.fetch(PAGE_SIZE)", "('/tasks/fetch_builders', FetchBuildersAction), ('/tasks/fetch_steps', FetchStepsAction), ('/tasks/update_parsed_data', UpdateParsedDataAction), ('/viewlog/raw/(.*)', ViewRawLogAction)]) def main():", "get(self): sort = 'count' if self.request.get('sort') in ('count',): sort =", "queries: for sr in q: if now_timestamp - sr.time_finished >", "q: if now_timestamp - sr.time_finished > datetime.timedelta(days=30): self.response.out.write(sr.name + '\\n')", "from google.appengine.ext.webapp import template from google.appengine.ext.webapp.util import run_wsgi_app import cloudstorage", "works. 'time_started': bs.time_started.isoformat(sep=' '), 'time_finished': bs.time_finished.isoformat(sep=' '), } for bs", "- sr.time_finished > datetime.timedelta(days=30): self.response.out.write(sr.name + '\\n') class ListAction(MyRequestHandler): \"\"\"Lists", "'\\n') class ListAction(MyRequestHandler): \"\"\"Lists stored build results.\"\"\" def get(self): all_steps", "# pylint: disable=pointless-string-statement \"\"\"When displaying a list of results, how", "# because buildbot will try again. 
app.process_status_push(self.request.body) class FetchBuildersAction(MyRequestHandler): def", "def get(self): all_steps = app.BuildStep.all().order('-time_finished') if self.request.get('buildbot_root'): all_steps.filter('buildbot_root =', urllib.unquote(self.request.get('buildbot_root')))", "= all_steps.fetch(limit=PAGE_SIZE) step_names = app.iterate_large_result(app.StepName.all().order('name')) self._render_template('list.html', { 'buildbot_roots': app.BUILDBOT_ROOTS, 'step_names':", "helpers.\"\"\" def _render_template(self, name, values): \"\"\" Wrapper for template.render that", "def post(self): # This handler should be extremely fast so", "suppression_summaries, 'cursor': query.cursor(), 'sort': sort, }) class ViewRawLogAction(blobstore_handlers.BlobstoreDownloadHandler): \"\"\"Sends selected", "class StatusReceiverAction(MyRequestHandler): def post(self): # This handler should be extremely", "class UnusedSuppressionsAction(MyRequestHandler): def post(self): now_timestamp = datetime.datetime.now() queries = []", "app.iterate_large_result(app.StepName.all().order('name')) self._render_template('list.html', { 'buildbot_roots': app.BUILDBOT_ROOTS, 'step_names': step_names, 'steps': steps, 'cursor':", "for q in queries: for sr in q: if now_timestamp", "'filter_builder': self.request.get('builder', ''), 'filter_step_name': self.request.get('step_name', ''), 'filter_status': self.request.get('status', ''), })", "import template from google.appengine.ext.webapp.util import run_wsgi_app import cloudstorage import app", "= query.fetch(PAGE_SIZE) cursor = query.cursor() self._render_template('query.html', { 'gtest_query': self.request.get('gtest_query'), 'cursor':", "class MainAction(MyRequestHandler): def get(self): self._render_template('main.html', {}) class GTestQueryAction(MyRequestHandler): def get(self):", "query.with_cursor(start_cursor=self.request.get('cursor')) gtest_results = query.fetch(PAGE_SIZE) cursor = query.cursor() 
self._render_template('query.html', { 'gtest_query':", "all_steps = app.BuildStep.all().order('-time_finished') if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) build_steps = all_steps.fetch(limit=1000) json_data", "''), 'filter_step_name': self.request.get('step_name', ''), 'filter_status': self.request.get('status', ''), }) class BuildStepJSONAction(MyRequestHandler):", "{ 'build_number': bs.build_number, 'buildbot_root': bs.buildbot_root, 'builder': bs.builder, 'status': bs.status, 'step_number':", "= query.fetch(PAGE_SIZE) self._render_template('suppression_query.html', { 'suppression_query': self.request.get('suppression_query'), 'cursor': query.cursor(), 'suppression_results': suppression_results,", "}) class BuildStepJSONAction(MyRequestHandler): def get(self): all_steps = app.BuildStep.all().order('-time_finished') if self.request.get('cursor'):", "os.path import pickle import sys import urllib sys.path.append( os.path.join(os.path.abspath(os.path.dirname(__file__)), 'third_party'))", "'builder': bs.builder, 'status': bs.status, 'step_number': bs.step_number, 'step_name': bs.step_name, # BigQuery", "with this application specific helpers.\"\"\" def _render_template(self, name, values): \"\"\"", "sort) if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_summaries = query.fetch(PAGE_SIZE) self._render_template('suppression_summary.html', { 'suppression_summary_query':", "blobkey): # pylint: disable=arguments-differ blob_info = blobstore.BlobInfo.get(urllib.unquote(blobkey)) if not blob_info:", "that updates response and knows where to look for templates.", "'cursor': query.cursor(), 'sort': sort, }) class ViewRawLogAction(blobstore_handlers.BlobstoreDownloadHandler): \"\"\"Sends selected log", "results, how many to display on one page.\"\"\" PAGE_SIZE =", "google.appengine.ext import deferred from google.appengine.ext import webapp from google.appengine.ext.webapp 
import", "= app.MemorySuppressionSummary.all() monthly_timestamp = datetime.date.today().replace(day=1) query.filter('monthly_timestamp =', monthly_timestamp) query.order('monthly_timestamp') query.order('-%s'", "_clean_int(urllib.unquote( self.request.get('status')), None)) if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) steps = all_steps.fetch(limit=PAGE_SIZE) step_names", "rights reserved. # Use of this source code is governed", "class FetchStepsAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_steps) class UpdateParsedDataAction(MyRequestHandler): def get(self): query", "T separator, but space works. 'time_started': bs.time_started.isoformat(sep=' '), 'time_finished': bs.time_finished.isoformat(sep='", "def main(): my_default_retry_params = cloudstorage.RetryParams( initial_delay=0.5, max_delay=30.0, backoff_factor=2, urlfetch_timeout=60) cloudstorage.set_default_retry_params(my_default_retry_params)", "\"\"\" self.response.out.write(template.render( os.path.join(os.path.dirname(__file__), 'templates', name), values)) class StatusReceiverAction(MyRequestHandler): def post(self):", "_clean_int(value, default): \"\"\"Convert a value to an int, or the", "# This handler should be extremely fast so that buildbot", "sr.time_finished > datetime.timedelta(days=30): self.response.out.write(sr.name + '\\n') class ListAction(MyRequestHandler): \"\"\"Lists stored", "= app.MemorySuppressionResult.all() query.filter('name =', line) query.order('-time_finished') queries.append(query.run(limit=1)) for q in", "if self.request.get('step_name'): all_steps.filter('step_name =', urllib.unquote(self.request.get('step_name'))) if self.request.get('status'): all_steps.filter('status =', _clean_int(urllib.unquote(", "in queries: for sr in q: if now_timestamp - sr.time_finished", "initial_delay=0.5, max_delay=30.0, backoff_factor=2, urlfetch_timeout=60) cloudstorage.set_default_retry_params(my_default_retry_params) 
run_wsgi_app(application) if __name__ == '__main__':", "cursor, 'gtest_results': gtest_results, }) class SuppressionQueryAction(MyRequestHandler): def get(self): query =", "build_steps ], 'cursor': all_steps.cursor(), } self.response.out.write(json.dumps(json_data)) class SuppressionSummaryAction(MyRequestHandler): \"\"\"Displays summary", "} self.response.out.write(json.dumps(json_data)) class SuppressionSummaryAction(MyRequestHandler): \"\"\"Displays summary information about memory suppressions.\"\"\"", "app.process_status_push(self.request.body) class FetchBuildersAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_builders) class FetchStepsAction(MyRequestHandler): def get(self):", "self._render_template('suppression_query.html', { 'suppression_query': self.request.get('suppression_query'), 'cursor': query.cursor(), 'suppression_results': suppression_results, }) class", "get(self, blobkey): # pylint: disable=arguments-differ blob_info = blobstore.BlobInfo.get(urllib.unquote(blobkey)) if not", "ListAction(MyRequestHandler): \"\"\"Lists stored build results.\"\"\" def get(self): all_steps = app.BuildStep.all().order('-time_finished')", "import deferred from google.appengine.ext import webapp from google.appengine.ext.webapp import blobstore_handlers", "get(self): self._render_template('main.html', {}) class GTestQueryAction(MyRequestHandler): def get(self): gtest_results = []", "background. try: deferred.defer(app.process_status_push, self.request.body, _queue='fast') except Exception: # For large", "buildbot will try again. 
app.process_status_push(self.request.body) class FetchBuildersAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_builders)", "if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_summaries = query.fetch(PAGE_SIZE) self._render_template('suppression_summary.html', { 'suppression_summary_query': self.request.get('suppression_summary_query'),", "('/unused_suppressions', UnusedSuppressionsAction), ('/list', ListAction), ('/build_step_json', BuildStepJSONAction), ('/status_receiver', StatusReceiverAction), ('/tasks/fetch_builders', FetchBuildersAction),", "\"\"\"Displays summary information about memory suppressions.\"\"\" def get(self): sort =", "\"\"\"When displaying a list of results, how many to display", "pylint: disable=arguments-differ blob_info = blobstore.BlobInfo.get(urllib.unquote(blobkey)) if not blob_info: self.error(404) return", "('/tasks/update_parsed_data', UpdateParsedDataAction), ('/viewlog/raw/(.*)', ViewRawLogAction)]) def main(): my_default_retry_params = cloudstorage.RetryParams( initial_delay=0.5,", "line in self.request.body.splitlines(): query = app.MemorySuppressionResult.all() query.filter('name =', line) query.order('-time_finished')", "bs in build_steps ], 'cursor': all_steps.cursor(), } self.response.out.write(json.dumps(json_data)) class SuppressionSummaryAction(MyRequestHandler):", "stored build results.\"\"\" def get(self): all_steps = app.BuildStep.all().order('-time_finished') if self.request.get('buildbot_root'):", "\"\"\"Sends selected log file to the user.\"\"\" def get(self, blobkey):", "if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_results = query.fetch(PAGE_SIZE) self._render_template('suppression_query.html', { 'suppression_query': self.request.get('suppression_query'),", "step_names, 'steps': steps, 'cursor': all_steps.cursor(), 'filter_buildbot_root': self.request.get('buildbot_root', ''), 'filter_builder': 
self.request.get('builder',", "for bs in build_steps ], 'cursor': all_steps.cursor(), } self.response.out.write(json.dumps(json_data)) class", "datetime.datetime.now() queries = [] for line in self.request.body.splitlines(): query =", "query = app.BuildStep.all(keys_only=True) query.filter('is_fetched =', True) query.filter('is_too_large =', False) deferred.defer(app.for_all_entities,", "= query.fetch(PAGE_SIZE) self._render_template('suppression_summary.html', { 'suppression_summary_query': self.request.get('suppression_summary_query'), 'suppression_summaries': suppression_summaries, 'cursor': query.cursor(),", "self.request.get('gtest_query'), 'cursor': cursor, 'gtest_results': gtest_results, }) class SuppressionQueryAction(MyRequestHandler): def get(self):", "UpdateParsedDataAction), ('/viewlog/raw/(.*)', ViewRawLogAction)]) def main(): my_default_retry_params = cloudstorage.RetryParams( initial_delay=0.5, max_delay=30.0,", "queries.append(query.run(limit=1)) for q in queries: for sr in q: if", "all_steps.filter('status =', _clean_int(urllib.unquote( self.request.get('status')), None)) if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) steps =", "= app.BuildStep.all().order('-time_finished') if self.request.get('buildbot_root'): all_steps.filter('buildbot_root =', urllib.unquote(self.request.get('buildbot_root'))) if self.request.get('builder'): all_steps.filter('builder", "os.path.join(os.path.abspath(os.path.dirname(__file__)), 'third_party')) from google.appengine.ext import blobstore from google.appengine.ext import db", "suppression_summaries = query.fetch(PAGE_SIZE) self._render_template('suppression_summary.html', { 'suppression_summary_query': self.request.get('suppression_summary_query'), 'suppression_summaries': suppression_summaries, 'cursor':", "= blobstore.BlobInfo.get(urllib.unquote(blobkey)) if not blob_info: self.error(404) return self.send_blob(blob_info) application =", 
"all_steps.fetch(limit=PAGE_SIZE) step_names = app.iterate_large_result(app.StepName.all().order('name')) self._render_template('list.html', { 'buildbot_roots': app.BUILDBOT_ROOTS, 'step_names': step_names,", "query.fetch(PAGE_SIZE) cursor = query.cursor() self._render_template('query.html', { 'gtest_query': self.request.get('gtest_query'), 'cursor': cursor,", "self.request.get('status', ''), }) class BuildStepJSONAction(MyRequestHandler): def get(self): all_steps = app.BuildStep.all().order('-time_finished')", "= [] for line in self.request.body.splitlines(): query = app.MemorySuppressionResult.all() query.filter('name", "on one page.\"\"\" PAGE_SIZE = 100 def _clean_int(value, default): \"\"\"Convert", "class SuppressionQueryAction(MyRequestHandler): def get(self): query = app.MemorySuppressionResult.all() query.filter('name =', self.request.get('suppression_query'))", "500 # because buildbot will try again. app.process_status_push(self.request.body) class FetchBuildersAction(MyRequestHandler):", "will try again. app.process_status_push(self.request.body) class FetchBuildersAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_builders) class", "response and knows where to look for templates. 
\"\"\" self.response.out.write(template.render(", "appengine_config import datetime import json import logging import os.path import", "Wrapper for template.render that updates response and knows where to", "SuppressionSummaryAction(MyRequestHandler): \"\"\"Displays summary information about memory suppressions.\"\"\" def get(self): sort", "license that can be # found in the LICENSE file.", "datetime.date.today().replace(day=1) query.filter('monthly_timestamp =', monthly_timestamp) query.order('monthly_timestamp') query.order('-%s' % sort) if self.request.get('cursor'):", "line) query.order('-time_finished') queries.append(query.run(limit=1)) for q in queries: for sr in", "SuppressionQueryAction(MyRequestHandler): def get(self): query = app.MemorySuppressionResult.all() query.filter('name =', self.request.get('suppression_query')) query.order('-time_finished')", "urllib.unquote(self.request.get('builder'))) if self.request.get('step_name'): all_steps.filter('step_name =', urllib.unquote(self.request.get('step_name'))) if self.request.get('status'): all_steps.filter('status =',", "blob_info = blobstore.BlobInfo.get(urllib.unquote(blobkey)) if not blob_info: self.error(404) return self.send_blob(blob_info) application", "to display on one page.\"\"\" PAGE_SIZE = 100 def _clean_int(value,", "], 'cursor': all_steps.cursor(), } self.response.out.write(json.dumps(json_data)) class SuppressionSummaryAction(MyRequestHandler): \"\"\"Displays summary information", "'cursor': all_steps.cursor(), 'filter_buildbot_root': self.request.get('buildbot_root', ''), 'filter_builder': self.request.get('builder', ''), 'filter_step_name': self.request.get('step_name',", "if self.request.get('sort') in ('count',): sort = self.request.get('sort') query = app.MemorySuppressionSummary.all()", "app import gtest_parser # pylint: disable=pointless-string-statement \"\"\"When displaying a list", "if self.request.get('gtest_query'): query = app.GTestResult.all() query.filter('fullname =', 
self.request.get('gtest_query')) query.order('-time_finished') if", "'filter_buildbot_root': self.request.get('buildbot_root', ''), 'filter_builder': self.request.get('builder', ''), 'filter_step_name': self.request.get('step_name', ''), 'filter_status':", "BSD-style license that can be # found in the LICENSE", "app.BuildStep.all().order('-time_finished') if self.request.get('buildbot_root'): all_steps.filter('buildbot_root =', urllib.unquote(self.request.get('buildbot_root'))) if self.request.get('builder'): all_steps.filter('builder =',", "query.cursor(), 'suppression_results': suppression_results, }) class UnusedSuppressionsAction(MyRequestHandler): def post(self): now_timestamp =", "# Use of this source code is governed by a", "=', urllib.unquote(self.request.get('builder'))) if self.request.get('step_name'): all_steps.filter('step_name =', urllib.unquote(self.request.get('step_name'))) if self.request.get('status'): all_steps.filter('status", "for template.render that updates response and knows where to look", "webapp.WSGIApplication( [('/', MainAction), ('/gtest_query', GTestQueryAction), ('/suppression_query', SuppressionQueryAction), ('/suppression_summary', SuppressionSummaryAction), ('/unused_suppressions',", "app.BUILDBOT_ROOTS, 'step_names': step_names, 'steps': steps, 'cursor': all_steps.cursor(), 'filter_buildbot_root': self.request.get('buildbot_root', ''),", "[] cursor = None if self.request.get('gtest_query'): query = app.GTestResult.all() query.filter('fullname", "def get(self): sort = 'count' if self.request.get('sort') in ('count',): sort", "blob_info: self.error(404) return self.send_blob(blob_info) application = webapp.WSGIApplication( [('/', MainAction), ('/gtest_query',", "large requests we have to do it now. 
We can't", "\"\"\"Base request handler with this application specific helpers.\"\"\" def _render_template(self,", "StatusReceiverAction), ('/tasks/fetch_builders', FetchBuildersAction), ('/tasks/fetch_steps', FetchStepsAction), ('/tasks/update_parsed_data', UpdateParsedDataAction), ('/viewlog/raw/(.*)', ViewRawLogAction)]) def", "'cursor': query.cursor(), 'suppression_results': suppression_results, }) class UnusedSuppressionsAction(MyRequestHandler): def post(self): now_timestamp", "import run_wsgi_app import cloudstorage import app import gtest_parser # pylint:", "self._render_template('query.html', { 'gtest_query': self.request.get('gtest_query'), 'cursor': cursor, 'gtest_results': gtest_results, }) class", "cursor = query.cursor() self._render_template('query.html', { 'gtest_query': self.request.get('gtest_query'), 'cursor': cursor, 'gtest_results':", "'suppression_results': suppression_results, }) class UnusedSuppressionsAction(MyRequestHandler): def post(self): now_timestamp = datetime.datetime.now()", "main(): my_default_retry_params = cloudstorage.RetryParams( initial_delay=0.5, max_delay=30.0, backoff_factor=2, urlfetch_timeout=60) cloudstorage.set_default_retry_params(my_default_retry_params) run_wsgi_app(application)", "= all_steps.fetch(limit=1000) json_data = { 'build_steps': [ { 'build_number': bs.build_number,", "max_delay=30.0, backoff_factor=2, urlfetch_timeout=60) cloudstorage.set_default_retry_params(my_default_retry_params) run_wsgi_app(application) if __name__ == '__main__': main()", "'status': bs.status, 'step_number': bs.step_number, 'step_name': bs.step_name, # BigQuery doesn't recognize", "requests we have to do it now. We can't return", "FetchStepsAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_steps) class UpdateParsedDataAction(MyRequestHandler): def get(self): query =", "# Copyright (c) 2011 The Chromium Authors. 
All rights reserved.", "% sort) if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_summaries = query.fetch(PAGE_SIZE) self._render_template('suppression_summary.html', {", "def _clean_int(value, default): \"\"\"Convert a value to an int, or", "to the # background. try: deferred.defer(app.process_status_push, self.request.body, _queue='fast') except Exception:", "doesn't fail # the push and doesn't get stuck on", "('/viewlog/raw/(.*)', ViewRawLogAction)]) def main(): my_default_retry_params = cloudstorage.RetryParams( initial_delay=0.5, max_delay=30.0, backoff_factor=2,", "a BSD-style license that can be # found in the", "an int, or the default value if conversion fails.\"\"\" try:", "deferred.defer(app.for_all_entities, query, app.update_parsed_data, None) class MainAction(MyRequestHandler): def get(self): self._render_template('main.html', {})", "where to look for templates. \"\"\" self.response.out.write(template.render( os.path.join(os.path.dirname(__file__), 'templates', name),", "recognize the T separator, but space works. 'time_started': bs.time_started.isoformat(sep=' '),", "get stuck on us. Defer all processing to the #", "None if self.request.get('gtest_query'): query = app.GTestResult.all() query.filter('fullname =', self.request.get('gtest_query')) query.order('-time_finished')", "True) query.filter('is_too_large =', False) deferred.defer(app.for_all_entities, query, app.update_parsed_data, None) class MainAction(MyRequestHandler):", "+ '\\n') class ListAction(MyRequestHandler): \"\"\"Lists stored build results.\"\"\" def get(self):", "Copyright (c) 2011 The Chromium Authors. All rights reserved. 
#", "urllib.unquote(self.request.get('buildbot_root'))) if self.request.get('builder'): all_steps.filter('builder =', urllib.unquote(self.request.get('builder'))) if self.request.get('step_name'): all_steps.filter('step_name =',", "application specific helpers.\"\"\" def _render_template(self, name, values): \"\"\" Wrapper for", "'sort': sort, }) class ViewRawLogAction(blobstore_handlers.BlobstoreDownloadHandler): \"\"\"Sends selected log file to", "a value to an int, or the default value if", "self.response.out.write(template.render( os.path.join(os.path.dirname(__file__), 'templates', name), values)) class StatusReceiverAction(MyRequestHandler): def post(self): #", "query.cursor() self._render_template('query.html', { 'gtest_query': self.request.get('gtest_query'), 'cursor': cursor, 'gtest_results': gtest_results, })", "bs.step_name, # BigQuery doesn't recognize the T separator, but space", "value to an int, or the default value if conversion", "stuck on us. Defer all processing to the # background.", "google.appengine.ext import db from google.appengine.ext import deferred from google.appengine.ext import", "= webapp.WSGIApplication( [('/', MainAction), ('/gtest_query', GTestQueryAction), ('/suppression_query', SuppressionQueryAction), ('/suppression_summary', SuppressionSummaryAction),", "page.\"\"\" PAGE_SIZE = 100 def _clean_int(value, default): \"\"\"Convert a value", "for sr in q: if now_timestamp - sr.time_finished > datetime.timedelta(days=30):", "logging import os.path import pickle import sys import urllib sys.path.append(", "us. Defer all processing to the # background. try: deferred.defer(app.process_status_push,", "(c) 2011 The Chromium Authors. All rights reserved. # Use", "BigQuery doesn't recognize the T separator, but space works. 
'time_started':", "application = webapp.WSGIApplication( [('/', MainAction), ('/gtest_query', GTestQueryAction), ('/suppression_query', SuppressionQueryAction), ('/suppression_summary',", "self.request.get('step_name'): all_steps.filter('step_name =', urllib.unquote(self.request.get('step_name'))) if self.request.get('status'): all_steps.filter('status =', _clean_int(urllib.unquote( self.request.get('status')),", "summary information about memory suppressions.\"\"\" def get(self): sort = 'count'", "the user.\"\"\" def get(self, blobkey): # pylint: disable=arguments-differ blob_info =", "from google.appengine.ext.webapp.util import run_wsgi_app import cloudstorage import app import gtest_parser", "self._render_template('main.html', {}) class GTestQueryAction(MyRequestHandler): def get(self): gtest_results = [] cursor", "pickle import sys import urllib sys.path.append( os.path.join(os.path.abspath(os.path.dirname(__file__)), 'third_party')) from google.appengine.ext", "the # background. try: deferred.defer(app.process_status_push, self.request.body, _queue='fast') except Exception: #", "class UpdateParsedDataAction(MyRequestHandler): def get(self): query = app.BuildStep.all(keys_only=True) query.filter('is_fetched =', True)", "except (TypeError, ValueError), _: return default class MyRequestHandler(webapp.RequestHandler): \"\"\"Base request", "post(self): now_timestamp = datetime.datetime.now() queries = [] for line in", "os.path.join(os.path.dirname(__file__), 'templates', name), values)) class StatusReceiverAction(MyRequestHandler): def post(self): # This", "import datetime import json import logging import os.path import pickle", "suppression_results, }) class UnusedSuppressionsAction(MyRequestHandler): def post(self): now_timestamp = datetime.datetime.now() queries", "how many to display on one page.\"\"\" PAGE_SIZE = 100", "datetime import json import logging import os.path import pickle import", "run_wsgi_app import cloudstorage import app import gtest_parser # 
pylint: disable=pointless-string-statement", "knows where to look for templates. \"\"\" self.response.out.write(template.render( os.path.join(os.path.dirname(__file__), 'templates',", "query.with_cursor(start_cursor=self.request.get('cursor')) suppression_results = query.fetch(PAGE_SIZE) self._render_template('suppression_query.html', { 'suppression_query': self.request.get('suppression_query'), 'cursor': query.cursor(),", "the T separator, but space works. 'time_started': bs.time_started.isoformat(sep=' '), 'time_finished':", "('count',): sort = self.request.get('sort') query = app.MemorySuppressionSummary.all() monthly_timestamp = datetime.date.today().replace(day=1)", "int, or the default value if conversion fails.\"\"\" try: return", "'), } for bs in build_steps ], 'cursor': all_steps.cursor(), }", "query.filter('name =', self.request.get('suppression_query')) query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_results = query.fetch(PAGE_SIZE)", "def get(self): all_steps = app.BuildStep.all().order('-time_finished') if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) build_steps =", "queries = [] for line in self.request.body.splitlines(): query = app.MemorySuppressionResult.all()", "self.request.get('gtest_query')) query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) gtest_results = query.fetch(PAGE_SIZE) cursor =", "MainAction), ('/gtest_query', GTestQueryAction), ('/suppression_query', SuppressionQueryAction), ('/suppression_summary', SuppressionSummaryAction), ('/unused_suppressions', UnusedSuppressionsAction), ('/list',", "pylint: disable=pointless-string-statement \"\"\"When displaying a list of results, how many", "_: return default class MyRequestHandler(webapp.RequestHandler): \"\"\"Base request handler with this", "self._render_template('suppression_summary.html', { 
'suppression_summary_query': self.request.get('suppression_summary_query'), 'suppression_summaries': suppression_summaries, 'cursor': query.cursor(), 'sort': sort,", "FetchStepsAction), ('/tasks/update_parsed_data', UpdateParsedDataAction), ('/viewlog/raw/(.*)', ViewRawLogAction)]) def main(): my_default_retry_params = cloudstorage.RetryParams(", "extremely fast so that buildbot doesn't fail # the push", "file. import appengine_config import datetime import json import logging import", "_queue='fast') except Exception: # For large requests we have to", "self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) steps = all_steps.fetch(limit=PAGE_SIZE) step_names = app.iterate_large_result(app.StepName.all().order('name')) self._render_template('list.html', {", "import urllib sys.path.append( os.path.join(os.path.abspath(os.path.dirname(__file__)), 'third_party')) from google.appengine.ext import blobstore from", "values)) class StatusReceiverAction(MyRequestHandler): def post(self): # This handler should be", "BuildStepJSONAction(MyRequestHandler): def get(self): all_steps = app.BuildStep.all().order('-time_finished') if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) build_steps", "= query.cursor() self._render_template('query.html', { 'gtest_query': self.request.get('gtest_query'), 'cursor': cursor, 'gtest_results': gtest_results,", "now. 
We can't return HTTP 500 # because buildbot will", "q in queries: for sr in q: if now_timestamp -", "this source code is governed by a BSD-style license that", "if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) build_steps = all_steps.fetch(limit=1000) json_data = { 'build_steps':", "return self.send_blob(blob_info) application = webapp.WSGIApplication( [('/', MainAction), ('/gtest_query', GTestQueryAction), ('/suppression_query',", "sys import urllib sys.path.append( os.path.join(os.path.abspath(os.path.dirname(__file__)), 'third_party')) from google.appengine.ext import blobstore", "look for templates. \"\"\" self.response.out.write(template.render( os.path.join(os.path.dirname(__file__), 'templates', name), values)) class", "gtest_parser # pylint: disable=pointless-string-statement \"\"\"When displaying a list of results,", "on us. Defer all processing to the # background. try:", "SuppressionSummaryAction), ('/unused_suppressions', UnusedSuppressionsAction), ('/list', ListAction), ('/build_step_json', BuildStepJSONAction), ('/status_receiver', StatusReceiverAction), ('/tasks/fetch_builders',", "from google.appengine.ext.webapp import blobstore_handlers from google.appengine.ext.webapp import template from google.appengine.ext.webapp.util", "google.appengine.ext import webapp from google.appengine.ext.webapp import blobstore_handlers from google.appengine.ext.webapp import", "class MyRequestHandler(webapp.RequestHandler): \"\"\"Base request handler with this application specific helpers.\"\"\"", "ValueError), _: return default class MyRequestHandler(webapp.RequestHandler): \"\"\"Base request handler with", "about memory suppressions.\"\"\" def get(self): sort = 'count' if self.request.get('sort')", "list of results, how many to display on one page.\"\"\"", "steps, 'cursor': all_steps.cursor(), 'filter_buildbot_root': self.request.get('buildbot_root', ''), 'filter_builder': self.request.get('builder', ''), 
'filter_step_name':", "query.fetch(PAGE_SIZE) self._render_template('suppression_summary.html', { 'suppression_summary_query': self.request.get('suppression_summary_query'), 'suppression_summaries': suppression_summaries, 'cursor': query.cursor(), 'sort':", "app.GTestResult.all() query.filter('fullname =', self.request.get('gtest_query')) query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) gtest_results =", "source code is governed by a BSD-style license that can", "import app import gtest_parser # pylint: disable=pointless-string-statement \"\"\"When displaying a", "Authors. All rights reserved. # Use of this source code", "file to the user.\"\"\" def get(self, blobkey): # pylint: disable=arguments-differ", "now_timestamp - sr.time_finished > datetime.timedelta(days=30): self.response.out.write(sr.name + '\\n') class ListAction(MyRequestHandler):", "if not blob_info: self.error(404) return self.send_blob(blob_info) application = webapp.WSGIApplication( [('/',", "build_steps = all_steps.fetch(limit=1000) json_data = { 'build_steps': [ { 'build_number':", "to an int, or the default value if conversion fails.\"\"\"", "class SuppressionSummaryAction(MyRequestHandler): \"\"\"Displays summary information about memory suppressions.\"\"\" def get(self):", "bs.status, 'step_number': bs.step_number, 'step_name': bs.step_name, # BigQuery doesn't recognize the", "def _render_template(self, name, values): \"\"\" Wrapper for template.render that updates", "memory suppressions.\"\"\" def get(self): sort = 'count' if self.request.get('sort') in", "self.request.body, _queue='fast') except Exception: # For large requests we have", "values): \"\"\" Wrapper for template.render that updates response and knows", "to the user.\"\"\" def get(self, blobkey): # pylint: disable=arguments-differ blob_info", "self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) build_steps = 
all_steps.fetch(limit=1000) json_data = { 'build_steps': [", "app.BuildStep.all(keys_only=True) query.filter('is_fetched =', True) query.filter('is_too_large =', False) deferred.defer(app.for_all_entities, query, app.update_parsed_data,", "# background. try: deferred.defer(app.process_status_push, self.request.body, _queue='fast') except Exception: # For", "bs.step_number, 'step_name': bs.step_name, # BigQuery doesn't recognize the T separator,", "datetime.timedelta(days=30): self.response.out.write(sr.name + '\\n') class ListAction(MyRequestHandler): \"\"\"Lists stored build results.\"\"\"", "blobstore.BlobInfo.get(urllib.unquote(blobkey)) if not blob_info: self.error(404) return self.send_blob(blob_info) application = webapp.WSGIApplication(", "specific helpers.\"\"\" def _render_template(self, name, values): \"\"\" Wrapper for template.render", "in the LICENSE file. import appengine_config import datetime import json", "StatusReceiverAction(MyRequestHandler): def post(self): # This handler should be extremely fast", "name), values)) class StatusReceiverAction(MyRequestHandler): def post(self): # This handler should", "app.MemorySuppressionResult.all() query.filter('name =', line) query.order('-time_finished') queries.append(query.run(limit=1)) for q in queries:", "query.filter('is_too_large =', False) deferred.defer(app.for_all_entities, query, app.update_parsed_data, None) class MainAction(MyRequestHandler): def", "name, values): \"\"\" Wrapper for template.render that updates response and", "self.request.get('suppression_query'), 'cursor': query.cursor(), 'suppression_results': suppression_results, }) class UnusedSuppressionsAction(MyRequestHandler): def post(self):", "in ('count',): sort = self.request.get('sort') query = app.MemorySuppressionSummary.all() monthly_timestamp =", "query.with_cursor(start_cursor=self.request.get('cursor')) suppression_summaries = query.fetch(PAGE_SIZE) self._render_template('suppression_summary.html', { 
'suppression_summary_query': self.request.get('suppression_summary_query'), 'suppression_summaries': suppression_summaries,", "log file to the user.\"\"\" def get(self, blobkey): # pylint:", "=', urllib.unquote(self.request.get('step_name'))) if self.request.get('status'): all_steps.filter('status =', _clean_int(urllib.unquote( self.request.get('status')), None)) if", "return default class MyRequestHandler(webapp.RequestHandler): \"\"\"Base request handler with this application", "templates. \"\"\" self.response.out.write(template.render( os.path.join(os.path.dirname(__file__), 'templates', name), values)) class StatusReceiverAction(MyRequestHandler): def", "= app.MemorySuppressionResult.all() query.filter('name =', self.request.get('suppression_query')) query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_results", "if self.request.get('builder'): all_steps.filter('builder =', urllib.unquote(self.request.get('builder'))) if self.request.get('step_name'): all_steps.filter('step_name =', urllib.unquote(self.request.get('step_name')))", "a list of results, how many to display on one", "import os.path import pickle import sys import urllib sys.path.append( os.path.join(os.path.abspath(os.path.dirname(__file__)),", "urllib sys.path.append( os.path.join(os.path.abspath(os.path.dirname(__file__)), 'third_party')) from google.appengine.ext import blobstore from google.appengine.ext", "for templates. 
\"\"\" self.response.out.write(template.render( os.path.join(os.path.dirname(__file__), 'templates', name), values)) class StatusReceiverAction(MyRequestHandler):", "request handler with this application specific helpers.\"\"\" def _render_template(self, name,", "json import logging import os.path import pickle import sys import", "self.request.get('status')), None)) if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) steps = all_steps.fetch(limit=PAGE_SIZE) step_names =", "updates response and knows where to look for templates. \"\"\"", "of this source code is governed by a BSD-style license", "('/suppression_summary', SuppressionSummaryAction), ('/unused_suppressions', UnusedSuppressionsAction), ('/list', ListAction), ('/build_step_json', BuildStepJSONAction), ('/status_receiver', StatusReceiverAction),", "{ 'buildbot_roots': app.BUILDBOT_ROOTS, 'step_names': step_names, 'steps': steps, 'cursor': all_steps.cursor(), 'filter_buildbot_root':", "= app.BuildStep.all().order('-time_finished') if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) build_steps = all_steps.fetch(limit=1000) json_data =", "app.BuildStep.all().order('-time_finished') if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) build_steps = all_steps.fetch(limit=1000) json_data = {", "reserved. 
# Use of this source code is governed by", "be extremely fast so that buildbot doesn't fail # the", "conversion fails.\"\"\" try: return int(value) except (TypeError, ValueError), _: return", "of results, how many to display on one page.\"\"\" PAGE_SIZE", "} for bs in build_steps ], 'cursor': all_steps.cursor(), } self.response.out.write(json.dumps(json_data))", "query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_results = query.fetch(PAGE_SIZE) self._render_template('suppression_query.html', { 'suppression_query':", "}) class SuppressionQueryAction(MyRequestHandler): def get(self): query = app.MemorySuppressionResult.all() query.filter('name =',", "query.order('-time_finished') queries.append(query.run(limit=1)) for q in queries: for sr in q:", "self.request.get('status'): all_steps.filter('status =', _clean_int(urllib.unquote( self.request.get('status')), None)) if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) steps", "can be # found in the LICENSE file. import appengine_config", "the LICENSE file. 
import appengine_config import datetime import json import", "default): \"\"\"Convert a value to an int, or the default", "=', self.request.get('gtest_query')) query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) gtest_results = query.fetch(PAGE_SIZE) cursor", "sys.path.append( os.path.join(os.path.abspath(os.path.dirname(__file__)), 'third_party')) from google.appengine.ext import blobstore from google.appengine.ext import", "urllib.unquote(self.request.get('step_name'))) if self.request.get('status'): all_steps.filter('status =', _clean_int(urllib.unquote( self.request.get('status')), None)) if self.request.get('cursor'):", "the default value if conversion fails.\"\"\" try: return int(value) except", "'third_party')) from google.appengine.ext import blobstore from google.appengine.ext import db from", "this application specific helpers.\"\"\" def _render_template(self, name, values): \"\"\" Wrapper", "'step_name': bs.step_name, # BigQuery doesn't recognize the T separator, but", "sr in q: if now_timestamp - sr.time_finished > datetime.timedelta(days=30): self.response.out.write(sr.name", "class ViewRawLogAction(blobstore_handlers.BlobstoreDownloadHandler): \"\"\"Sends selected log file to the user.\"\"\" def", "'gtest_query': self.request.get('gtest_query'), 'cursor': cursor, 'gtest_results': gtest_results, }) class SuppressionQueryAction(MyRequestHandler): def", "handler with this application specific helpers.\"\"\" def _render_template(self, name, values):", "suppression_results = query.fetch(PAGE_SIZE) self._render_template('suppression_query.html', { 'suppression_query': self.request.get('suppression_query'), 'cursor': query.cursor(), 'suppression_results':", "selected log file to the user.\"\"\" def get(self, blobkey): #", "all_steps.with_cursor(start_cursor=self.request.get('cursor')) steps = all_steps.fetch(limit=PAGE_SIZE) step_names = 
app.iterate_large_result(app.StepName.all().order('name')) self._render_template('list.html', { 'buildbot_roots':", "'cursor': cursor, 'gtest_results': gtest_results, }) class SuppressionQueryAction(MyRequestHandler): def get(self): query", "= app.BuildStep.all(keys_only=True) query.filter('is_fetched =', True) query.filter('is_too_large =', False) deferred.defer(app.for_all_entities, query,", "}) class ViewRawLogAction(blobstore_handlers.BlobstoreDownloadHandler): \"\"\"Sends selected log file to the user.\"\"\"", "google.appengine.ext.webapp import template from google.appengine.ext.webapp.util import run_wsgi_app import cloudstorage import", "all_steps.cursor(), } self.response.out.write(json.dumps(json_data)) class SuppressionSummaryAction(MyRequestHandler): \"\"\"Displays summary information about memory", "=', _clean_int(urllib.unquote( self.request.get('status')), None)) if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor')) steps = all_steps.fetch(limit=PAGE_SIZE)", "information about memory suppressions.\"\"\" def get(self): sort = 'count' if", "= app.GTestResult.all() query.filter('fullname =', self.request.get('gtest_query')) query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) gtest_results", "query.filter('fullname =', self.request.get('gtest_query')) query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) gtest_results = query.fetch(PAGE_SIZE)", "'steps': steps, 'cursor': all_steps.cursor(), 'filter_buildbot_root': self.request.get('buildbot_root', ''), 'filter_builder': self.request.get('builder', ''),", "ListAction), ('/build_step_json', BuildStepJSONAction), ('/status_receiver', StatusReceiverAction), ('/tasks/fetch_builders', FetchBuildersAction), ('/tasks/fetch_steps', FetchStepsAction), ('/tasks/update_parsed_data',", "'time_finished': bs.time_finished.isoformat(sep=' '), } for 
bs in build_steps ], 'cursor':", "if self.request.get('status'): all_steps.filter('status =', _clean_int(urllib.unquote( self.request.get('status')), None)) if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor'))", "deferred from google.appengine.ext import webapp from google.appengine.ext.webapp import blobstore_handlers from", "if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) gtest_results = query.fetch(PAGE_SIZE) cursor = query.cursor() self._render_template('query.html',", "=', True) query.filter('is_too_large =', False) deferred.defer(app.for_all_entities, query, app.update_parsed_data, None) class", "self.request.get('builder', ''), 'filter_step_name': self.request.get('step_name', ''), 'filter_status': self.request.get('status', ''), }) class", "> datetime.timedelta(days=30): self.response.out.write(sr.name + '\\n') class ListAction(MyRequestHandler): \"\"\"Lists stored build", "or the default value if conversion fails.\"\"\" try: return int(value)", "def get(self, blobkey): # pylint: disable=arguments-differ blob_info = blobstore.BlobInfo.get(urllib.unquote(blobkey)) if", "handler should be extremely fast so that buildbot doesn't fail", "{ 'suppression_query': self.request.get('suppression_query'), 'cursor': query.cursor(), 'suppression_results': suppression_results, }) class UnusedSuppressionsAction(MyRequestHandler):", "\"\"\"Convert a value to an int, or the default value", "\"\"\" Wrapper for template.render that updates response and knows where", "to look for templates. 
\"\"\" self.response.out.write(template.render( os.path.join(os.path.dirname(__file__), 'templates', name), values))", "import blobstore_handlers from google.appengine.ext.webapp import template from google.appengine.ext.webapp.util import run_wsgi_app", "=', self.request.get('suppression_query')) query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_results = query.fetch(PAGE_SIZE) self._render_template('suppression_query.html',", "'step_number': bs.step_number, 'step_name': bs.step_name, # BigQuery doesn't recognize the T", "'gtest_results': gtest_results, }) class SuppressionQueryAction(MyRequestHandler): def get(self): query = app.MemorySuppressionResult.all()", "processing to the # background. try: deferred.defer(app.process_status_push, self.request.body, _queue='fast') except", "'count' if self.request.get('sort') in ('count',): sort = self.request.get('sort') query =", "build results.\"\"\" def get(self): all_steps = app.BuildStep.all().order('-time_finished') if self.request.get('buildbot_root'): all_steps.filter('buildbot_root", "try: return int(value) except (TypeError, ValueError), _: return default class", "displaying a list of results, how many to display on", "= 100 def _clean_int(value, default): \"\"\"Convert a value to an", "def get(self): self._render_template('main.html', {}) class GTestQueryAction(MyRequestHandler): def get(self): gtest_results =", "= 'count' if self.request.get('sort') in ('count',): sort = self.request.get('sort') query", "[ { 'build_number': bs.build_number, 'buildbot_root': bs.buildbot_root, 'builder': bs.builder, 'status': bs.status,", "self.request.get('builder'): all_steps.filter('builder =', urllib.unquote(self.request.get('builder'))) if self.request.get('step_name'): all_steps.filter('step_name =', urllib.unquote(self.request.get('step_name'))) if", "class BuildStepJSONAction(MyRequestHandler): def get(self): all_steps = 
app.BuildStep.all().order('-time_finished') if self.request.get('cursor'): all_steps.with_cursor(start_cursor=self.request.get('cursor'))", "('/tasks/fetch_steps', FetchStepsAction), ('/tasks/update_parsed_data', UpdateParsedDataAction), ('/viewlog/raw/(.*)', ViewRawLogAction)]) def main(): my_default_retry_params =", "self._render_template('list.html', { 'buildbot_roots': app.BUILDBOT_ROOTS, 'step_names': step_names, 'steps': steps, 'cursor': all_steps.cursor(),", "{}) class GTestQueryAction(MyRequestHandler): def get(self): gtest_results = [] cursor =", "disable=arguments-differ blob_info = blobstore.BlobInfo.get(urllib.unquote(blobkey)) if not blob_info: self.error(404) return self.send_blob(blob_info)", "'step_names': step_names, 'steps': steps, 'cursor': all_steps.cursor(), 'filter_buildbot_root': self.request.get('buildbot_root', ''), 'filter_builder':", "value if conversion fails.\"\"\" try: return int(value) except (TypeError, ValueError),", "app.update_parsed_data, None) class MainAction(MyRequestHandler): def get(self): self._render_template('main.html', {}) class GTestQueryAction(MyRequestHandler):", "FetchBuildersAction), ('/tasks/fetch_steps', FetchStepsAction), ('/tasks/update_parsed_data', UpdateParsedDataAction), ('/viewlog/raw/(.*)', ViewRawLogAction)]) def main(): my_default_retry_params", "can't return HTTP 500 # because buildbot will try again.", "_render_template(self, name, values): \"\"\" Wrapper for template.render that updates response", "deferred.defer(app.process_status_push, self.request.body, _queue='fast') except Exception: # For large requests we", "query.cursor(), 'sort': sort, }) class ViewRawLogAction(blobstore_handlers.BlobstoreDownloadHandler): \"\"\"Sends selected log file", "if now_timestamp - sr.time_finished > datetime.timedelta(days=30): self.response.out.write(sr.name + '\\n') class", "from google.appengine.ext import deferred from google.appengine.ext import webapp from google.appengine.ext.webapp", "# pylint: 
disable=arguments-differ blob_info = blobstore.BlobInfo.get(urllib.unquote(blobkey)) if not blob_info: self.error(404)", "def get(self): query = app.BuildStep.all(keys_only=True) query.filter('is_fetched =', True) query.filter('is_too_large =',", "all_steps.with_cursor(start_cursor=self.request.get('cursor')) build_steps = all_steps.fetch(limit=1000) json_data = { 'build_steps': [ {", "def get(self): deferred.defer(app.fetch_steps) class UpdateParsedDataAction(MyRequestHandler): def get(self): query = app.BuildStep.all(keys_only=True)", "=', False) deferred.defer(app.for_all_entities, query, app.update_parsed_data, None) class MainAction(MyRequestHandler): def get(self):", "=', urllib.unquote(self.request.get('buildbot_root'))) if self.request.get('builder'): all_steps.filter('builder =', urllib.unquote(self.request.get('builder'))) if self.request.get('step_name'): all_steps.filter('step_name", "steps = all_steps.fetch(limit=PAGE_SIZE) step_names = app.iterate_large_result(app.StepName.all().order('name')) self._render_template('list.html', { 'buildbot_roots': app.BUILDBOT_ROOTS,", "[] for line in self.request.body.splitlines(): query = app.MemorySuppressionResult.all() query.filter('name =',", "'buildbot_roots': app.BUILDBOT_ROOTS, 'step_names': step_names, 'steps': steps, 'cursor': all_steps.cursor(), 'filter_buildbot_root': self.request.get('buildbot_root',", "MyRequestHandler(webapp.RequestHandler): \"\"\"Base request handler with this application specific helpers.\"\"\" def", "bs.builder, 'status': bs.status, 'step_number': bs.step_number, 'step_name': bs.step_name, # BigQuery doesn't", "self.request.get('buildbot_root'): all_steps.filter('buildbot_root =', urllib.unquote(self.request.get('buildbot_root'))) if self.request.get('builder'): all_steps.filter('builder =', urllib.unquote(self.request.get('builder'))) if", "'filter_status': self.request.get('status', ''), }) class BuildStepJSONAction(MyRequestHandler): def get(self): all_steps =", "step_names = 
app.iterate_large_result(app.StepName.all().order('name')) self._render_template('list.html', { 'buildbot_roots': app.BUILDBOT_ROOTS, 'step_names': step_names, 'steps':", "def get(self): query = app.MemorySuppressionResult.all() query.filter('name =', self.request.get('suppression_query')) query.order('-time_finished') if", "False) deferred.defer(app.for_all_entities, query, app.update_parsed_data, None) class MainAction(MyRequestHandler): def get(self): self._render_template('main.html',", "bs.time_finished.isoformat(sep=' '), } for bs in build_steps ], 'cursor': all_steps.cursor(),", "''), }) class BuildStepJSONAction(MyRequestHandler): def get(self): all_steps = app.BuildStep.all().order('-time_finished') if", "LICENSE file. import appengine_config import datetime import json import logging", "class ListAction(MyRequestHandler): \"\"\"Lists stored build results.\"\"\" def get(self): all_steps =", "query.filter('name =', line) query.order('-time_finished') queries.append(query.run(limit=1)) for q in queries: for", "import sys import urllib sys.path.append( os.path.join(os.path.abspath(os.path.dirname(__file__)), 'third_party')) from google.appengine.ext import", "= datetime.datetime.now() queries = [] for line in self.request.body.splitlines(): query", "doesn't recognize the T separator, but space works. 'time_started': bs.time_started.isoformat(sep='", "self.request.get('step_name', ''), 'filter_status': self.request.get('status', ''), }) class BuildStepJSONAction(MyRequestHandler): def get(self):", "and knows where to look for templates. \"\"\" self.response.out.write(template.render( os.path.join(os.path.dirname(__file__),", "my_default_retry_params = cloudstorage.RetryParams( initial_delay=0.5, max_delay=30.0, backoff_factor=2, urlfetch_timeout=60) cloudstorage.set_default_retry_params(my_default_retry_params) run_wsgi_app(application) if", "Defer all processing to the # background. 
try: deferred.defer(app.process_status_push, self.request.body,", "=', line) query.order('-time_finished') queries.append(query.run(limit=1)) for q in queries: for sr", "if conversion fails.\"\"\" try: return int(value) except (TypeError, ValueError), _:", "'buildbot_root': bs.buildbot_root, 'builder': bs.builder, 'status': bs.status, 'step_number': bs.step_number, 'step_name': bs.step_name,", "disable=pointless-string-statement \"\"\"When displaying a list of results, how many to", "''), 'filter_builder': self.request.get('builder', ''), 'filter_step_name': self.request.get('step_name', ''), 'filter_status': self.request.get('status', ''),", "should be extremely fast so that buildbot doesn't fail #", "cloudstorage.RetryParams( initial_delay=0.5, max_delay=30.0, backoff_factor=2, urlfetch_timeout=60) cloudstorage.set_default_retry_params(my_default_retry_params) run_wsgi_app(application) if __name__ ==", "get(self): deferred.defer(app.fetch_steps) class UpdateParsedDataAction(MyRequestHandler): def get(self): query = app.BuildStep.all(keys_only=True) query.filter('is_fetched", "by a BSD-style license that can be # found in", "all_steps.filter('buildbot_root =', urllib.unquote(self.request.get('buildbot_root'))) if self.request.get('builder'): all_steps.filter('builder =', urllib.unquote(self.request.get('builder'))) if self.request.get('step_name'):", "gtest_results = [] cursor = None if self.request.get('gtest_query'): query =", "self.request.body.splitlines(): query = app.MemorySuppressionResult.all() query.filter('name =', line) query.order('-time_finished') queries.append(query.run(limit=1)) for", "if self.request.get('buildbot_root'): all_steps.filter('buildbot_root =', urllib.unquote(self.request.get('buildbot_root'))) if self.request.get('builder'): all_steps.filter('builder =', urllib.unquote(self.request.get('builder')))", "self.request.get('suppression_summary_query'), 'suppression_summaries': suppression_summaries, 'cursor': query.cursor(), 'sort': sort, 
}) class ViewRawLogAction(blobstore_handlers.BlobstoreDownloadHandler):", "bs.buildbot_root, 'builder': bs.builder, 'status': bs.status, 'step_number': bs.step_number, 'step_name': bs.step_name, #", "try again. app.process_status_push(self.request.body) class FetchBuildersAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_builders) class FetchStepsAction(MyRequestHandler):", "import gtest_parser # pylint: disable=pointless-string-statement \"\"\"When displaying a list of", "# For large requests we have to do it now.", "cursor = None if self.request.get('gtest_query'): query = app.GTestResult.all() query.filter('fullname =',", "The Chromium Authors. All rights reserved. # Use of this", "'build_steps': [ { 'build_number': bs.build_number, 'buildbot_root': bs.buildbot_root, 'builder': bs.builder, 'status':", "query = app.MemorySuppressionResult.all() query.filter('name =', self.request.get('suppression_query')) query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor'))", "'), 'time_finished': bs.time_finished.isoformat(sep=' '), } for bs in build_steps ],", "{ 'build_steps': [ { 'build_number': bs.build_number, 'buildbot_root': bs.buildbot_root, 'builder': bs.builder,", "separator, but space works. 'time_started': bs.time_started.isoformat(sep=' '), 'time_finished': bs.time_finished.isoformat(sep=' '),", "have to do it now. 
We can't return HTTP 500", "user.\"\"\" def get(self, blobkey): # pylint: disable=arguments-differ blob_info = blobstore.BlobInfo.get(urllib.unquote(blobkey))", "get(self): all_steps = app.BuildStep.all().order('-time_finished') if self.request.get('buildbot_root'): all_steps.filter('buildbot_root =', urllib.unquote(self.request.get('buildbot_root'))) if", "query = app.GTestResult.all() query.filter('fullname =', self.request.get('gtest_query')) query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor'))", "all_steps.filter('step_name =', urllib.unquote(self.request.get('step_name'))) if self.request.get('status'): all_steps.filter('status =', _clean_int(urllib.unquote( self.request.get('status')), None))", "We can't return HTTP 500 # because buildbot will try", "self.response.out.write(sr.name + '\\n') class ListAction(MyRequestHandler): \"\"\"Lists stored build results.\"\"\" def", "self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_results = query.fetch(PAGE_SIZE) self._render_template('suppression_query.html', { 'suppression_query': self.request.get('suppression_query'), 'cursor':", "'filter_step_name': self.request.get('step_name', ''), 'filter_status': self.request.get('status', ''), }) class BuildStepJSONAction(MyRequestHandler): def", "{ 'suppression_summary_query': self.request.get('suppression_summary_query'), 'suppression_summaries': suppression_summaries, 'cursor': query.cursor(), 'sort': sort, })", "get(self): deferred.defer(app.fetch_builders) class FetchStepsAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_steps) class UpdateParsedDataAction(MyRequestHandler): def", "bs.time_started.isoformat(sep=' '), 'time_finished': bs.time_finished.isoformat(sep=' '), } for bs in build_steps", "all_steps = app.BuildStep.all().order('-time_finished') if self.request.get('buildbot_root'): all_steps.filter('buildbot_root =', 
urllib.unquote(self.request.get('buildbot_root'))) if self.request.get('builder'):", "import cloudstorage import app import gtest_parser # pylint: disable=pointless-string-statement \"\"\"When", "= app.iterate_large_result(app.StepName.all().order('name')) self._render_template('list.html', { 'buildbot_roots': app.BUILDBOT_ROOTS, 'step_names': step_names, 'steps': steps,", "default class MyRequestHandler(webapp.RequestHandler): \"\"\"Base request handler with this application specific", "query.fetch(PAGE_SIZE) self._render_template('suppression_query.html', { 'suppression_query': self.request.get('suppression_query'), 'cursor': query.cursor(), 'suppression_results': suppression_results, })", "app.MemorySuppressionResult.all() query.filter('name =', self.request.get('suppression_query')) query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_results =", "monthly_timestamp = datetime.date.today().replace(day=1) query.filter('monthly_timestamp =', monthly_timestamp) query.order('monthly_timestamp') query.order('-%s' % sort)", "that can be # found in the LICENSE file. 
import", "import appengine_config import datetime import json import logging import os.path", "default value if conversion fails.\"\"\" try: return int(value) except (TypeError,", "sort, }) class ViewRawLogAction(blobstore_handlers.BlobstoreDownloadHandler): \"\"\"Sends selected log file to the", "google.appengine.ext import blobstore from google.appengine.ext import db from google.appengine.ext import", "google.appengine.ext.webapp import blobstore_handlers from google.appengine.ext.webapp import template from google.appengine.ext.webapp.util import", "post(self): # This handler should be extremely fast so that", "self.request.get('suppression_query')) query.order('-time_finished') if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_results = query.fetch(PAGE_SIZE) self._render_template('suppression_query.html', {", "UpdateParsedDataAction(MyRequestHandler): def get(self): query = app.BuildStep.all(keys_only=True) query.filter('is_fetched =', True) query.filter('is_too_large", "return HTTP 500 # because buildbot will try again. app.process_status_push(self.request.body)", "All rights reserved. # Use of this source code is", "db from google.appengine.ext import deferred from google.appengine.ext import webapp from", "'build_number': bs.build_number, 'buildbot_root': bs.buildbot_root, 'builder': bs.builder, 'status': bs.status, 'step_number': bs.step_number,", "fail # the push and doesn't get stuck on us.", "because buildbot will try again. app.process_status_push(self.request.body) class FetchBuildersAction(MyRequestHandler): def get(self):", "import blobstore from google.appengine.ext import db from google.appengine.ext import deferred", "the push and doesn't get stuck on us. 
Defer all", "cloudstorage import app import gtest_parser # pylint: disable=pointless-string-statement \"\"\"When displaying", "return int(value) except (TypeError, ValueError), _: return default class MyRequestHandler(webapp.RequestHandler):", "do it now. We can't return HTTP 500 # because", "100 def _clean_int(value, default): \"\"\"Convert a value to an int,", "BuildStepJSONAction), ('/status_receiver', StatusReceiverAction), ('/tasks/fetch_builders', FetchBuildersAction), ('/tasks/fetch_steps', FetchStepsAction), ('/tasks/update_parsed_data', UpdateParsedDataAction), ('/viewlog/raw/(.*)',", "template.render that updates response and knows where to look for", "def get(self): gtest_results = [] cursor = None if self.request.get('gtest_query'):", "= cloudstorage.RetryParams( initial_delay=0.5, max_delay=30.0, backoff_factor=2, urlfetch_timeout=60) cloudstorage.set_default_retry_params(my_default_retry_params) run_wsgi_app(application) if __name__", "= None if self.request.get('gtest_query'): query = app.GTestResult.all() query.filter('fullname =', self.request.get('gtest_query'))", "that buildbot doesn't fail # the push and doesn't get", "gtest_results, }) class SuppressionQueryAction(MyRequestHandler): def get(self): query = app.MemorySuppressionResult.all() query.filter('name", "self.send_blob(blob_info) application = webapp.WSGIApplication( [('/', MainAction), ('/gtest_query', GTestQueryAction), ('/suppression_query', SuppressionQueryAction),", "= { 'build_steps': [ { 'build_number': bs.build_number, 'buildbot_root': bs.buildbot_root, 'builder':", "import logging import os.path import pickle import sys import urllib", "so that buildbot doesn't fail # the push and doesn't", "import pickle import sys import urllib sys.path.append( os.path.join(os.path.abspath(os.path.dirname(__file__)), 'third_party')) from", "self.error(404) return self.send_blob(blob_info) application = webapp.WSGIApplication( [('/', MainAction), ('/gtest_query', GTestQueryAction),", "try: 
deferred.defer(app.process_status_push, self.request.body, _queue='fast') except Exception: # For large requests", "and doesn't get stuck on us. Defer all processing to", "}) class UnusedSuppressionsAction(MyRequestHandler): def post(self): now_timestamp = datetime.datetime.now() queries =", "push and doesn't get stuck on us. Defer all processing", "= [] cursor = None if self.request.get('gtest_query'): query = app.GTestResult.all()", "be # found in the LICENSE file. import appengine_config import", "template from google.appengine.ext.webapp.util import run_wsgi_app import cloudstorage import app import", "buildbot doesn't fail # the push and doesn't get stuck", "query.order('-%s' % sort) if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_summaries = query.fetch(PAGE_SIZE) self._render_template('suppression_summary.html',", "MainAction(MyRequestHandler): def get(self): self._render_template('main.html', {}) class GTestQueryAction(MyRequestHandler): def get(self): gtest_results", "('/build_step_json', BuildStepJSONAction), ('/status_receiver', StatusReceiverAction), ('/tasks/fetch_builders', FetchBuildersAction), ('/tasks/fetch_steps', FetchStepsAction), ('/tasks/update_parsed_data', UpdateParsedDataAction),", "blobstore from google.appengine.ext import db from google.appengine.ext import deferred from", "display on one page.\"\"\" PAGE_SIZE = 100 def _clean_int(value, default):", "SuppressionQueryAction), ('/suppression_summary', SuppressionSummaryAction), ('/unused_suppressions', UnusedSuppressionsAction), ('/list', ListAction), ('/build_step_json', BuildStepJSONAction), ('/status_receiver',", "it now. 
We can't return HTTP 500 # because buildbot", "self.request.get('sort') query = app.MemorySuppressionSummary.all() monthly_timestamp = datetime.date.today().replace(day=1) query.filter('monthly_timestamp =', monthly_timestamp)", "except Exception: # For large requests we have to do", "class FetchBuildersAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_builders) class FetchStepsAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_steps)", "This handler should be extremely fast so that buildbot doesn't", "Chromium Authors. All rights reserved. # Use of this source", "[('/', MainAction), ('/gtest_query', GTestQueryAction), ('/suppression_query', SuppressionQueryAction), ('/suppression_summary', SuppressionSummaryAction), ('/unused_suppressions', UnusedSuppressionsAction),", "('/status_receiver', StatusReceiverAction), ('/tasks/fetch_builders', FetchBuildersAction), ('/tasks/fetch_steps', FetchStepsAction), ('/tasks/update_parsed_data', UpdateParsedDataAction), ('/viewlog/raw/(.*)', ViewRawLogAction)])", "all_steps.cursor(), 'filter_buildbot_root': self.request.get('buildbot_root', ''), 'filter_builder': self.request.get('builder', ''), 'filter_step_name': self.request.get('step_name', ''),", "Exception: # For large requests we have to do it", "monthly_timestamp) query.order('monthly_timestamp') query.order('-%s' % sort) if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_summaries =", "deferred.defer(app.fetch_builders) class FetchStepsAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_steps) class UpdateParsedDataAction(MyRequestHandler): def get(self):", "one page.\"\"\" PAGE_SIZE = 100 def _clean_int(value, default): \"\"\"Convert a", "but space works. 
'time_started': bs.time_started.isoformat(sep=' '), 'time_finished': bs.time_finished.isoformat(sep=' '), }", "in self.request.body.splitlines(): query = app.MemorySuppressionResult.all() query.filter('name =', line) query.order('-time_finished') queries.append(query.run(limit=1))", "fast so that buildbot doesn't fail # the push and", "suppressions.\"\"\" def get(self): sort = 'count' if self.request.get('sort') in ('count',):", "query = app.MemorySuppressionResult.all() query.filter('name =', line) query.order('-time_finished') queries.append(query.run(limit=1)) for q", "sort = 'count' if self.request.get('sort') in ('count',): sort = self.request.get('sort')", "fails.\"\"\" try: return int(value) except (TypeError, ValueError), _: return default", "'templates', name), values)) class StatusReceiverAction(MyRequestHandler): def post(self): # This handler", "def post(self): now_timestamp = datetime.datetime.now() queries = [] for line", "import webapp from google.appengine.ext.webapp import blobstore_handlers from google.appengine.ext.webapp import template", "\"\"\"Lists stored build results.\"\"\" def get(self): all_steps = app.BuildStep.all().order('-time_finished') if", "'suppression_summary_query': self.request.get('suppression_summary_query'), 'suppression_summaries': suppression_summaries, 'cursor': query.cursor(), 'sort': sort, }) class", "to do it now. 
We can't return HTTP 500 #", "'suppression_query': self.request.get('suppression_query'), 'cursor': query.cursor(), 'suppression_results': suppression_results, }) class UnusedSuppressionsAction(MyRequestHandler): def", "results.\"\"\" def get(self): all_steps = app.BuildStep.all().order('-time_finished') if self.request.get('buildbot_root'): all_steps.filter('buildbot_root =',", "for line in self.request.body.splitlines(): query = app.MemorySuppressionResult.all() query.filter('name =', line)", "json_data = { 'build_steps': [ { 'build_number': bs.build_number, 'buildbot_root': bs.buildbot_root,", "'suppression_summaries': suppression_summaries, 'cursor': query.cursor(), 'sort': sort, }) class ViewRawLogAction(blobstore_handlers.BlobstoreDownloadHandler): \"\"\"Sends", "all_steps.filter('builder =', urllib.unquote(self.request.get('builder'))) if self.request.get('step_name'): all_steps.filter('step_name =', urllib.unquote(self.request.get('step_name'))) if self.request.get('status'):", "class GTestQueryAction(MyRequestHandler): def get(self): gtest_results = [] cursor = None", "self.request.get('sort') in ('count',): sort = self.request.get('sort') query = app.MemorySuppressionSummary.all() monthly_timestamp", "google.appengine.ext.webapp.util import run_wsgi_app import cloudstorage import app import gtest_parser #", "from google.appengine.ext import blobstore from google.appengine.ext import db from google.appengine.ext", "PAGE_SIZE = 100 def _clean_int(value, default): \"\"\"Convert a value to", "'time_started': bs.time_started.isoformat(sep=' '), 'time_finished': bs.time_finished.isoformat(sep=' '), } for bs in", "int(value) except (TypeError, ValueError), _: return default class MyRequestHandler(webapp.RequestHandler): \"\"\"Base", "def get(self): deferred.defer(app.fetch_builders) class FetchStepsAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_steps) class UpdateParsedDataAction(MyRequestHandler):", "if self.request.get('cursor'): 
all_steps.with_cursor(start_cursor=self.request.get('cursor')) steps = all_steps.fetch(limit=PAGE_SIZE) step_names = app.iterate_large_result(app.StepName.all().order('name')) self._render_template('list.html',", "''), 'filter_status': self.request.get('status', ''), }) class BuildStepJSONAction(MyRequestHandler): def get(self): all_steps", "sort = self.request.get('sort') query = app.MemorySuppressionSummary.all() monthly_timestamp = datetime.date.today().replace(day=1) query.filter('monthly_timestamp", "query, app.update_parsed_data, None) class MainAction(MyRequestHandler): def get(self): self._render_template('main.html', {}) class", "('/list', ListAction), ('/build_step_json', BuildStepJSONAction), ('/status_receiver', StatusReceiverAction), ('/tasks/fetch_builders', FetchBuildersAction), ('/tasks/fetch_steps', FetchStepsAction),", "# the push and doesn't get stuck on us. Defer", "self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) gtest_results = query.fetch(PAGE_SIZE) cursor = query.cursor() self._render_template('query.html', {", "get(self): gtest_results = [] cursor = None if self.request.get('gtest_query'): query", "=', monthly_timestamp) query.order('monthly_timestamp') query.order('-%s' % sort) if self.request.get('cursor'): query.with_cursor(start_cursor=self.request.get('cursor')) suppression_summaries", "GTestQueryAction(MyRequestHandler): def get(self): gtest_results = [] cursor = None if", "we have to do it now. 
We can't return HTTP", "FetchBuildersAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_builders) class FetchStepsAction(MyRequestHandler): def get(self): deferred.defer(app.fetch_steps) class", "blobstore_handlers from google.appengine.ext.webapp import template from google.appengine.ext.webapp.util import run_wsgi_app import", "self.request.get('gtest_query'): query = app.GTestResult.all() query.filter('fullname =', self.request.get('gtest_query')) query.order('-time_finished') if self.request.get('cursor'):" ]
[ "dimensions X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(3,", "as np from caffe2.proto import caffe2_pb2 from caffe2.python import core,", "out = workspace.FetchBlob(\"out\") res = np.sum(X, axis=0) res = np.sum(res,", "Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res = np.array(np.sum(X)) np.testing.assert_array_almost_equal(out, res,", "self.assertGradientChecks(gc, op, [X, Y], 1, [0]) @serial.given(**hu.gcs) def test_broadcast_powt(self, gc,", "+ 1.0 Y = np.random.rand(4, 5).astype(np.float32) + 2.0 #two gradients", "absolute_import from __future__ import division from __future__ import print_function from", "# broadcasting with single elem dimensions at both ends X", "1.0 Y = np.random.rand(4, 5).astype(np.float32) + 2.0 #two gradients Y*X^(Y-1)", "\"Z\", broadcast=1, axis=0) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_axis0, output_to_grad=\"Z\", grad_reference=powt_grad_axis0)", "dimensions. X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(4,", "0 dims to account for broadcast def powt_grad_axis1(g_out, outputs, fwd_inputs):", "op, [X, Y], 1, [0]) # broadcasting with single elem", "= np.random.rand(1, 3, 4, 1).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\",", "op, [X, Y], [0]) @given(**hu.gcs) def test_semantic_broadcast(self, gc, dc): #", "\"out\", broadcast=1, axis_str=\"C\", order=\"NHWC\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out", "3, 4, 5).astype(np.float32) Y = np.random.rand(3).astype(np.float32) op = core.CreateOperator( \"Add\",", "= np.random.rand(2, 3).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\",", "= np.random.rand(3, 4).astype(np.float32) + 2.0 #pow op with the latter", "= np.random.rand(1, 4, 1).astype(np.float32) + 2.0 #pow op with the", "= np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0 Y = np.random.rand(3,", 
"tests for better coverage. class TestElementwiseBroadcast(serial.SerializedTestCase): @given(**hu.gcs) def test_broadcast_Add(self, gc,", "Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X * Y) self.assertDeviceChecks(dc,", "output_to_grad=\"Z\", grad_reference=powt_grad_broadcast) #2. broadcasting intermediate dimensions X = np.random.rand(2, 3,", "workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X + Y) self.assertDeviceChecks(dc, op,", "net.GivenTensorFill([], [\"X\"], values=[], shape=[2, 0, 5]) net.GivenTensorFill([], [\"Y\"], values=[], shape=[2,", "core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1) def ref_op(X, Y): res", "5).astype(np.float32) Y = np.random.rand(3).astype(np.float32) op = core.CreateOperator( \"Add\", [\"X\", \"Y\"],", "4).astype(np.float32) + 2.0 #pow op with the latter array increased", "[0]) @given(**hu.gcs) def test_broadcast_Sub(self, gc, dc): # Set broadcast and", "[X, Y], [0]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) #", "Y], [0]) # broadcasting with single elem dimensions at both", "np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Sub\",", "out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X + Y) self.assertDeviceChecks(dc, op, [X,", "import caffe2.python.hypothesis_test_util as hu import caffe2.python.serialized_test.serialized_test_util as serial # TODO(jiayq):", "both ends X = np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0", "and X^Y * ln(X) def powt_grad(g_out, outputs, fwd_inputs): [X, Y]", "3).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0)", "class TestElementwiseBroadcast(serial.SerializedTestCase): @given(**hu.gcs) def test_broadcast_Add(self, gc, dc): # Set broadcast", "op = core.CreateOperator( \"SumReduceLike\", 
[\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\",", "np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Mul\",", "threshold=1e-3) # broadcasting with single elem dimensions at both ends", "default X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(3).astype(np.float32)", "Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\",", "Y[np.newaxis, :, :, :]) #two gradients Y*X^(Y-1) and X^Y *", "def test_broadcast_Mul(self, gc, dc): # Set broadcast and no axis,", "division from __future__ import print_function from __future__ import unicode_literals import", "def test_sum_reduce_fp16(self, gc, dc): # Set broadcast and no axis,", "ref_op(X, Y): res = np.sum(X, axis=0) res = np.sum(res, axis=0)", "1.0 Y = np.random.rand(3, 4).astype(np.float32) + 2.0 #pow op with", "workspace.FetchBlob(\"out\") res = np.sum(X, axis=0) res = np.sum(res, axis=0) np.testing.assert_array_almost_equal(out,", "np.sum(X, axis=0) res = np.sum(res, axis=0) return [res] self.assertReferenceChecks( device_option=gc,", "= workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X * Y[:, :, np.newaxis]) self.assertGradientChecks(gc, op,", "i.e. broadcasting last dimensions. 
X = np.random.rand(2, 3, 4, 5).astype(np.float32)", "op, [X, Y], [0]) # fp64 is not supported with", "Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X * Y[:, :,", "np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1)", "\"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op)", "res = np.sum(X, axis=0) res = np.sum(res, axis=2) return [res.reshape(Y.shape)]", "op = core.CreateOperator( \"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis_str=\"C\", order=\"NHWC\")", "axis=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_mixed, output_to_grad=\"Z\", grad_reference=powt_grad_mixed) @given(**hu.gcs) def", "#latter gradient is sumed over 3, 2 and 1 dims", "core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op,", "+ 2.0 #pow op with the latter array increased by", "[X, Y[:, :, np.newaxis]]) return ([GX, np.sum(np.sum(GY, 3), 0)]) op", "[0]) @unittest.skipIf(not workspace.has_gpu_support, \"No gpu support\") @given(**hu.gcs_gpu_only) def test_sum_reduce_fp16(self, gc,", "= np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(1, 4, 1).astype(np.float32)", "workspace import caffe2.python.hypothesis_test_util as hu import caffe2.python.serialized_test.serialized_test_util as serial #", "= core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\",", "= core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, device_option=gc) def ref_op(X,", "np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(3, 4).astype(np.float16) op =", "np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y], 
[0]) # Set broadcast", "powt_grad_axis1(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs [GX, GY] =", "with core.DeviceScope(gc): net.GivenTensorFill([], [\"X\"], values=[], shape=[2, 0, 5]) net.GivenTensorFill([], [\"Y\"],", "Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1,", "np.sum(np.sum(GY, 3), 0)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1,", "[X, Y], 1, [0]) @given(**hu.gcs) def test_broadcast_Sub(self, gc, dc): #", "Y], 1, [0]) @given(**hu.gcs) def test_broadcast_Sub(self, gc, dc): # Set", "0 dims to account for broadcast def powt_grad_broadcast(g_out, outputs, fwd_inputs):", "one dim def powt_op_axis1(X, Y): return powt_op(X, Y[:, :, np.newaxis])", "[X, Y[np.newaxis, :, :, :]]) return ([GX, np.reshape(np.sum(np.sum(np.sum(GY, 3), 1),", "res) self.assertDeviceChecks(dc, op, [X, Y], [0]) # Set broadcast and", "1 and 0 dims to account for broadcast def powt_grad_broadcast(g_out,", "workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X + Y[:, np.newaxis,", "outputs, [X, Y[:, np.newaxis, np.newaxis, np.newaxis]]) return ([GX, np.sum(np.sum(np.sum(GY, 3),", "[X, Y], 1, [0]) # broadcasting the first dimension X", "Y], 1, [0]) @serial.given(**hu.gcs) def test_broadcast_powt(self, gc, dc): np.random.seed(101) #operator", "dc): # NCHW as default X = np.random.rand(2, 3, 4,", "broadcast=1, axis=0) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_axis0, output_to_grad=\"Z\", grad_reference=powt_grad_axis0) #4.", "= core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y],", "= workspace.FetchBlob(\"out\") res = np.array(np.sum(X)) np.testing.assert_array_almost_equal(out, res, decimal=0) # broadcasting", "res = np.sum(X, axis=0) res = np.sum(res, axis=2) np.testing.assert_array_almost_equal(out, res)", 
"4).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1)", "\"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) def ref_op(X, Y): res", "[0]) @given(**hu.gcs) def test_sum_reduce_empty_blob(self, gc, dc): net = core.Net('test') with", "X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32)", "sumed over 1 and 0 dims to account for broadcast", "order=\"NHWC\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out,", "= np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0 Y = np.random.rand(1,", "@unittest.skipIf(not workspace.has_gpu_support, \"No gpu support\") @given(**hu.gcs_gpu_only) def test_sum_reduce_fp16(self, gc, dc):", "np.random.rand(2, 3).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1,", "for broadcast def powt_grad_mixed(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs", "np.sum(X, axis=3) res = np.sum(res, axis=2) return [res] self.assertReferenceChecks( device_option=gc,", "Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\",", "+ Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0])", "axis=3) res = np.sum(res, axis=2) return [res] self.assertReferenceChecks( device_option=gc, op=op,", "axis=0) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(", "\"Y\"], \"out\", broadcast=1, axis=0) def ref_op(X, Y): res = np.sum(X,", "Y], 1, [0]) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting", "[0]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) @given(**hu.gcs) def test_broadcast_Sub(self,", "np.newaxis, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0]) 
self.assertGradientChecks(gc, op,", "np.sum(res, axis=0) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y], [0]) #", "X * Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y],", "Y], reference=ref_op, threshold=1e-3) # Set broadcast and no axis, i.e.", "core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting the first dimension", "Y): return powt_op(X, Y[np.newaxis, :, :, :]) #two gradients Y*X^(Y-1)", "from __future__ import unicode_literals import unittest from hypothesis import given", "broadcast=1, axis=1) def ref_op(X, Y): res = np.sum(X, axis=0) res", "4, 5).astype(np.float32) + 1.0 Y = np.random.rand(1, 4, 1).astype(np.float32) +", "Y], reference=powt_op_axis1, output_to_grad=\"Z\", grad_reference=powt_grad_axis1) #3. broadcasting the first dimension X", "dc if d.device_type != caffe2_pb2.CUDA] self.assertDeviceChecks(dc_cpu_only, op, [X, Y], [0])", "X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(1, 4,", "4, 5).astype(np.float16) Y = np.random.rand(3, 4).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\",", "Y): return [np.power(X, Y)] #two gradients Y*X^(Y-1) and X^Y *", "axis=0) workspace.RunNetOnce(net) @given(**hu.gcs) def test_sum_reduce(self, gc, dc): # Set broadcast", "make them hypothesis tests for better coverage. 
class TestElementwiseBroadcast(serial.SerializedTestCase): @given(**hu.gcs)", "[\"X\", \"Y\"], \"out\", broadcast=1, axis_str=\"C\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op)", "X + Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y],", "= np.sum(X, axis=0) res = np.sum(res, axis=2).reshape(Y.shape) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc,", "= powt_grad(g_out, outputs, [X, Y[:, np.newaxis, np.newaxis, np.newaxis]]) return ([GX,", "Y = np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"],", "\"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, device_option=gc) def ref_op(X, Y): res", "np.random.rand(3).astype(np.float32) op = core.CreateOperator( \"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis_str=\"C\")", "res = np.sum(X, axis=3) res = np.sum(res, axis=2) np.testing.assert_array_almost_equal(out, res,", "4, 5).astype(np.float16) Y = np.random.rand(4, 5).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\",", "np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\",", "Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc, op, [X, Y],", "[\"X\", \"Y\"], \"Z\", broadcast=1, axis=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_mixed,", "\"out\", broadcast=1, axis=1) def ref_op(X, Y): res = np.sum(X, axis=0)", "5).astype(np.float16) Y = np.random.rand(3, 4).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\",", "import absolute_import from __future__ import division from __future__ import print_function", "broadcasting intermediate dimensions X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y", "np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Add\",", "Y = 
np.random.rand(3).astype(np.float32) op = core.CreateOperator( \"Add\", [\"X\", \"Y\"], \"out\",", "Y], [0]) # NHWC X = np.random.rand(2, 3, 4, 5).astype(np.float32)", "np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\",", "op, [X, Y], [0]) # Set broadcast and no axis,", ":, :, :]]) return ([GX, np.reshape(np.sum(np.sum(np.sum(GY, 3), 1), 0), (1,", "np.newaxis]) #two gradients Y*X^(Y-1) and X^Y * ln(X) #latter gradient", "gc, dc): np.random.seed(101) #operator def powt_op(X, Y): return [np.power(X, Y)]", "workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X - Y[:,", "hu import caffe2.python.serialized_test.serialized_test_util as serial # TODO(jiayq): make them hypothesis", "workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X - Y[:, np.newaxis,", "np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting intermediate", "reference=powt_op_mixed, output_to_grad=\"Z\", grad_reference=powt_grad_mixed) @given(**hu.gcs) def test_broadcast_scalar(self, gc, dc): # broadcasting", "op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X)", "[X, Y], [0]) @given(**hu.gcs) def test_sum_reduce_empty_blob(self, gc, dc): net =", "[GX, GY] = powt_grad(g_out, outputs, fwd_inputs) return ([GX, np.sum(np.sum(GY, 1),", "return ([GX, np.reshape(np.sum(np.sum(np.sum(GY, 3), 1), 0), (1, 4, 1))]) op", "1).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1) def", "workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X + Y) self.assertDeviceChecks(dc, op, [X, Y], [0])", "1).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) 
workspace.FeedBlob(\"X\",", "Y = np.random.rand(2, 3).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"],", "out = workspace.FetchBlob(\"out\") res = np.array(np.sum(X)) np.testing.assert_array_almost_equal(out, res, decimal=0) #", "op=op, inputs=[X, Y], reference=powt_op_mixed, output_to_grad=\"Z\", grad_reference=powt_grad_mixed) @given(**hu.gcs) def test_broadcast_scalar(self, gc,", "dimensions at both ends X = np.random.rand(2, 3, 4, 5).astype(np.float16)", "fwd_inputs): [X, Y] = fwd_inputs [GX, GY] = powt_grad(g_out, outputs,", "caffe2.proto import caffe2_pb2 from caffe2.python import core, workspace import caffe2.python.hypothesis_test_util", "workspace.FetchBlob(\"out\") res = np.sum(X, axis=3) res = np.sum(res, axis=2) np.testing.assert_array_almost_equal(out,", "dimensions. X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(2,", "[0]) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting with single", "np.array(np.sum(X)) np.testing.assert_array_almost_equal(out, res, decimal=0) # broadcasting with single elem dimensions", "= np.random.rand(3).astype(np.float32) op = core.CreateOperator( \"Add\", [\"X\", \"Y\"], \"out\", broadcast=1,", "Y], 1, [0]) # broadcasting the first dimension X =", "self.assertGradientChecks(gc, op, [X, Y], 1, [0]) @given(**hu.gcs) def test_broadcast_Mul(self, gc,", "workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res = np.array(np.sum(X)) np.testing.assert_array_almost_equal(out, res, decimal=0)", "with the latter array increased by one dim def powt_op_axis1(X,", "4, 5).astype(np.float32) Y = np.random.rand(3).astype(np.float32) op = core.CreateOperator( \"Add\", [\"X\",", "[\"X\", \"Y\"], \"Z\", broadcast=1, axis=0) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_axis0,", "= np.random.rand(2, 3, 4, 500).astype(np.float64) Y = np.random.rand(1).astype(np.float64) op =", "res = np.sum(res, axis=2) 
np.testing.assert_array_almost_equal(out, res, decimal=3) self.assertDeviceChecks(dc, op, [X,", "workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res = np.sum(X, axis=0)", "def test_broadcast_powt(self, gc, dc): np.random.seed(101) #operator def powt_op(X, Y): return", "np.testing.assert_array_almost_equal( out, X - Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertGradientChecks(gc, op,", "4, 5).astype(np.float32) + 1.0 Y = np.random.rand(2).astype(np.float32) + 2.0 #pow", "axis=2) return [res.reshape(Y.shape)] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3)", "import core, workspace import caffe2.python.hypothesis_test_util as hu import caffe2.python.serialized_test.serialized_test_util as", "X + Y[:, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0])", "1).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\",", "#1. Set broadcast and no axis, i.e. 
broadcasting last dimensions.", "np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\",", "both ends X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y =", ":]) #two gradients Y*X^(Y-1) and X^Y * ln(X) #latter gradient", "X + Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc, op,", "np.sum(np.sum(np.sum(GY, 3), 2), 1)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\",", "Y[:, :, np.newaxis]]) return ([GX, np.sum(np.sum(GY, 3), 0)]) op =", "np.sum(X, axis=0) res = np.sum(res, axis=0) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op,", "= np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(3, 4).astype(np.float16) op", "op, [X, Y], 1, [0]) self.assertDeviceChecks(dc, op, [X, Y], [0])", "@given(**hu.gcs) def test_broadcast_scalar(self, gc, dc): # broadcasting constant X =", "= np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op =", "@given(**hu.gcs) def test_broadcast_Add(self, gc, dc): # Set broadcast and no", ":, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) self.assertDeviceChecks(dc, op,", "= np.random.rand(3, 4).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\",", "5).astype(np.float16) Y = np.random.rand(4, 5).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\",", "workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X *", "[X, Y], [0]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) @serial.given(**hu.gcs)", "Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"],", "def test_broadcast_scalar(self, gc, dc): # broadcasting constant X = np.random.rand(2,", "__future__ import unicode_literals import unittest from hypothesis import given import", 
"core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op)", "= np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1,", "= np.random.rand(2).astype(np.float32) + 2.0 #pow op with the latter array", "= np.sum(X, axis=0) res = np.sum(res, axis=0) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc,", "supported with the CUDA op dc_cpu_only = [d for d", "Y): res = np.sum(X, axis=0) res = np.sum(res, axis=0) return", "@serial.given(**hu.gcs) def test_broadcast_powt(self, gc, dc): np.random.seed(101) #operator def powt_op(X, Y):", "out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X * Y[:, :, np.newaxis]) self.assertGradientChecks(gc,", "3, 2 and 1 dims to account for broadcast def", "the latter array increased by one dim def powt_op_mixed(X, Y):", "1 dims to account for broadcast def powt_grad_axis0(g_out, outputs, fwd_inputs):", "broadcasting last dimensions. 
X = np.random.rand(2, 3, 4, 5).astype(np.float32) +", "fwd_inputs [GX, GY] = powt_grad(g_out, outputs, [X, Y[:, np.newaxis, np.newaxis,", "5).astype(np.float16) Y = np.random.rand(2, 3).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\",", "ends X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(1,", ":]]) return ([GX, np.reshape(np.sum(np.sum(np.sum(GY, 3), 1), 0), (1, 4, 1))])", "workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X - Y[:, :, np.newaxis])", "np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y], [0]) # fp64 is", "powt_grad(g_out, outputs, [X, Y[:, :, np.newaxis]]) return ([GX, np.sum(np.sum(GY, 3),", "core.CreateOperator( \"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis_str=\"C\", order=\"NHWC\") workspace.FeedBlob(\"X\", X)", "Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X + Y) self.assertDeviceChecks(dc,", "X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res = np.sum(X,", "res, decimal=0) # broadcasting with single elem dimensions at both", "is sumed over 1 and 0 dims to account for", "= np.random.rand(1).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\",", "Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res = np.sum(X, axis=3) res", "if d.device_type != caffe2_pb2.CUDA] self.assertDeviceChecks(dc_cpu_only, op, [X, Y], [0]) @unittest.skipIf(not", "X + Y[:, :, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0])", "increased by one dim def powt_op_axis0(X, Y): return powt_op(X, Y[:,", "4, 5).astype(np.float16) Y = np.random.rand(1, 3, 4, 1).astype(np.float16) op =", "5).astype(np.float32) Y = np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\",", "= np.random.rand(4, 
5).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\",", "5).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, device_option=gc)", "op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1) def ref_op(X,", "X + Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting", "over 1 and 0 dims to account for broadcast def", "workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res = np.sum(X, axis=3)", "broadcast def powt_grad_axis1(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs [GX,", "\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis_str=\"C\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "op, [X, Y], [0]) @unittest.skipIf(not workspace.has_gpu_support, \"No gpu support\") @given(**hu.gcs_gpu_only)", "op, [X, Y], [0]) @given(**hu.gcs) def test_sum_reduce_empty_blob(self, gc, dc): net", "threshold=1e-3) # Set broadcast and no axis, i.e. 
broadcasting last", ":, :]) #two gradients Y*X^(Y-1) and X^Y * ln(X) #latter", "op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\",", "outputs[0] return ([Y * np.power(X, Y - 1), Z *", "def test_semantic_broadcast(self, gc, dc): # NCHW as default X =", "[\"X\", \"Y\"], \"Z\", broadcast=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op, output_to_grad=\"Z\",", "1).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\",", ":, :, :]) #two gradients Y*X^(Y-1) and X^Y * ln(X)", "np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(3).astype(np.float32) op = core.CreateOperator(", "support\") @given(**hu.gcs_gpu_only) def test_sum_reduce_fp16(self, gc, dc): # Set broadcast and", "def powt_op_mixed(X, Y): return powt_op(X, Y[np.newaxis, :, :, :]) #two", "axis=2) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting", "def powt_grad_axis1(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs [GX, GY]", "np.newaxis, np.newaxis]) #two gradients Y*X^(Y-1) and X^Y * ln(X) #latter", "self.assertGradientChecks(gc, op, [X, Y], 1, [0]) @given(**hu.gcs) def test_broadcast_Sub(self, gc,", "np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(1, 4, 1).astype(np.float32) op", "dim def powt_op_axis0(X, Y): return powt_op(X, Y[:, np.newaxis, np.newaxis, np.newaxis])", "[\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out", "broadcast=1, axis=0) def ref_op(X, Y): res = np.sum(X, axis=3) res", "fwd_inputs [GX, GY] = powt_grad(g_out, outputs, [X, Y[:, :, np.newaxis]])", "0, 5]) net.GivenTensorFill([], [\"Y\"], values=[], shape=[2, 0]) net.SumReduceLike([\"X\", \"Y\"], \"out\",", "= fwd_inputs [GX, GY] = powt_grad(g_out, outputs, [X, Y[:, np.newaxis,", 
"workspace.has_gpu_support, \"No gpu support\") @given(**hu.gcs_gpu_only) def test_sum_reduce_fp16(self, gc, dc): #", "workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X - Y) self.assertDeviceChecks(dc, op, [X, Y], [0])", "output_to_grad=\"Z\", grad_reference=powt_grad_mixed) @given(**hu.gcs) def test_broadcast_scalar(self, gc, dc): # broadcasting constant", "NCHW as default X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y", "3, 4, 5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Sub\",", "np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0 Y = np.random.rand(2).astype(np.float32) +", "with single elem dimensions at both ends X = np.random.rand(2,", "fwd_inputs Z = outputs[0] return ([Y * np.power(X, Y -", "5).astype(np.float32) Y = np.random.rand(1, 3, 4, 1).astype(np.float32) op = core.CreateOperator(", "broadcast def powt_grad_axis0(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs [GX,", "res = np.sum(X, axis=0) res = np.sum(res, axis=0) return [res]", ":, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc, op, [X,", "broadcast=1, axis=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_axis1, output_to_grad=\"Z\", grad_reference=powt_grad_axis1) #3.", "= core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "3, 4, 5).astype(np.float32) Y = np.random.rand(1, 3, 4, 1).astype(np.float32) op", "X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(1).astype(np.float32) op", "axis=0) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res", "\"out\", broadcast=1, device_option=gc) def ref_op(X, Y): res = np.sum(X, axis=0)", "Y], [0]) @given(**hu.gcs) def test_semantic_broadcast(self, gc, dc): # NCHW as", "[\"X\", \"Y\"], \"out\", broadcast=1) def ref_op(X, Y): res = 
np.sum(X,", "5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\",", "fp64 is not supported with the CUDA op dc_cpu_only =", "[X, Y] = fwd_inputs [GX, GY] = powt_grad(g_out, outputs, [X,", "Y] = fwd_inputs [GX, GY] = powt_grad(g_out, outputs, [X, Y[np.newaxis,", "np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(1, 3, 4, 1).astype(np.float32)", "[X, Y], [0]) # NHWC X = np.random.rand(2, 3, 4,", "np.random.rand(1, 3, 4, 1).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"],", "[np.power(X, Y)] #two gradients Y*X^(Y-1) and X^Y * ln(X) def", "GY] = powt_grad(g_out, outputs, [X, Y[:, np.newaxis, np.newaxis, np.newaxis]]) return", "hypothesis tests for better coverage. class TestElementwiseBroadcast(serial.SerializedTestCase): @given(**hu.gcs) def test_broadcast_Add(self,", "* np.power(X, Y - 1), Z * np.log(X)] * g_out)", "Y] = fwd_inputs Z = outputs[0] return ([Y * np.power(X,", "import given import numpy as np from caffe2.proto import caffe2_pb2", "core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "np.testing.assert_array_almost_equal(out, X * Y[:, :, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y],", "= core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\",", "1, [0]) @serial.given(**hu.gcs) def test_broadcast_powt(self, gc, dc): np.random.seed(101) #operator def", "= np.sum(X, axis=0) res = np.sum(res, axis=2) return [res.reshape(Y.shape)] self.assertReferenceChecks(", "= np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32) op", "4, 1).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1)", "X + Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) @given(**hu.gcs) def", "= np.sum(res, axis=2).reshape(Y.shape) 
np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y], [0])", "Y] = fwd_inputs [GX, GY] = powt_grad(g_out, outputs, [X, Y[:,", "5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"],", "out, X + Y[:, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y],", "np.random.rand(2, 3).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1,", "gradient is sumed over 1 and 0 dims to account", "op=op, inputs=[X, Y], reference=powt_op_axis1, output_to_grad=\"Z\", grad_reference=powt_grad_axis1) #3. broadcasting the first", "both ends X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y =", "reference=powt_op_axis0, output_to_grad=\"Z\", grad_reference=powt_grad_axis0) #4. broadcasting with single elem dimensions at", "core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, device_option=gc) def ref_op(X, Y):", "[0]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) @given(**hu.gcs) def test_broadcast_Mul(self,", "([GX, np.sum(np.sum(np.sum(GY, 3), 2), 1)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"],", "= core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "3), 0)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1, axis=1)", "op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, device_option=gc) def", "[\"Y\"], values=[], shape=[2, 0]) net.SumReduceLike([\"X\", \"Y\"], \"out\", axis=0) workspace.RunNetOnce(net) @given(**hu.gcs)", "Y], reference=ref_op, threshold=1e-3) # broadcasting with single elem dimensions at", "4).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\",", "[GX, GY] = powt_grad(g_out, outputs, [X, Y[:, :, np.newaxis]]) return", "from __future__ import print_function from __future__ import 
unicode_literals import unittest", "account for broadcast def powt_grad_mixed(g_out, outputs, fwd_inputs): [X, Y] =", "axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out,", "np.sum(res, axis=2) np.testing.assert_array_almost_equal(out, res, decimal=3) self.assertDeviceChecks(dc, op, [X, Y], [0])", "Set broadcast and no axis, i.e. broadcasting last dimensions. X", "Y], reference=ref_op, threshold=1e-3) # broadcasting intermediate dimensions X = np.random.rand(2,", "decimal=0) # broadcasting with single elem dimensions at both ends", "= core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\",", "+ Y[:, :, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc,", "= core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X)", "reference=ref_op, threshold=1e-3) # broadcasting with single elem dimensions at both", "to account for broadcast def powt_grad_axis1(g_out, outputs, fwd_inputs): [X, Y]", "3, 4, 5).astype(np.float16) Y = np.random.rand(1, 3, 4, 1).astype(np.float16) op", "for broadcast def powt_grad_broadcast(g_out, outputs, fwd_inputs): [GX, GY] = powt_grad(g_out,", "[X, Y], [0]) # Set broadcast and no axis, i.e.", "with the CUDA op dc_cpu_only = [d for d in", "workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out,", "X = np.random.rand(2, 3, 4, 500).astype(np.float64) Y = np.random.rand(1).astype(np.float64) op", "given import numpy as np from caffe2.proto import caffe2_pb2 from", "= core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\",", "Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) 
@given(**hu.gcs) def test_sum_reduce_empty_blob(self, gc,", "core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1, axis=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y],", "with the latter array increased by one dim def powt_op_axis0(X,", "powt_op(X, Y[np.newaxis, :, :, :]) #two gradients Y*X^(Y-1) and X^Y", "= fwd_inputs [GX, GY] = powt_grad(g_out, outputs, [X, Y[:, :,", "4, 1).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1)", "Y], [0]) # broadcasting scalar X = np.random.rand(1).astype(np.float32) Y =", "def ref_op(X, Y): res = np.sum(X, axis=0) res = np.sum(res,", "powt_grad_broadcast(g_out, outputs, fwd_inputs): [GX, GY] = powt_grad(g_out, outputs, fwd_inputs) return", "and X^Y * ln(X) #latter gradient is sumed over 1", "np.random.rand(1).astype(np.float32) Y = np.random.rand(1).astype(np.float32).reshape([]) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\",", "!= caffe2_pb2.CUDA] self.assertDeviceChecks(dc_cpu_only, op, [X, Y], [0]) @unittest.skipIf(not workspace.has_gpu_support, \"No", "test_broadcast_Mul(self, gc, dc): # Set broadcast and no axis, i.e.", "# fp64 is not supported with the CUDA op dc_cpu_only", "Y = np.random.rand(5).astype(np.float32) op = core.CreateOperator( \"Add\", [\"X\", \"Y\"], \"out\",", "= np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0 Y = np.random.rand(4,", "Y = np.random.rand(2).astype(np.float32) + 2.0 #pow op with the latter", "def test_broadcast_Sub(self, gc, dc): # Set broadcast and no axis,", "np.newaxis, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) self.assertDeviceChecks(dc, op,", "5).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X)", "[X, Y], 1, [0]) # broadcasting with single elem dimensions", "@given(**hu.gcs_gpu_only) def test_sum_reduce_fp16(self, gc, dc): # Set broadcast and no", "- Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) 
self.assertGradientChecks(gc, op, [X,", "4, 5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"],", "op, [X, Y], 1, [0]) @given(**hu.gcs) def test_broadcast_Mul(self, gc, dc):", "out = workspace.FetchBlob(\"out\") res = np.sum(X, axis=3) res = np.sum(res,", "\"out\", broadcast=1) def ref_op(X, Y): res = np.sum(X, axis=0) res", "3, 4, 5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\",", "[res] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) # broadcasting", "workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X - Y)", "__future__ import print_function from __future__ import unicode_literals import unittest from", "ends X = np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0 Y", "core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\",", "axis=0) return [res] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3)", "self.assertDeviceChecks(dc, op, [X, Y], [0]) @given(**hu.gcs) def test_semantic_broadcast(self, gc, dc):", "\"Y\"], \"Z\", broadcast=1, axis=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_axis1, output_to_grad=\"Z\",", "powt_grad(g_out, outputs, fwd_inputs) return ([GX, np.sum(np.sum(GY, 1), 0)]) op =", "+ Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) @given(**hu.gcs) def test_semantic_broadcast(self,", "# TODO(jiayq): make them hypothesis tests for better coverage. 
class", "= workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X + Y[:, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc,", "op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X)", "([GX, np.reshape(np.sum(np.sum(np.sum(GY, 3), 1), 0), (1, 4, 1))]) op =", "Y = np.random.rand(1, 3, 4, 1).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\",", "[X, Y], [0]) # broadcasting the first dimension X =", "decimal=3) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting intermediate dimensions", "3, 4, 500).astype(np.float64) Y = np.random.rand(1).astype(np.float64) op = core.CreateOperator( \"SumReduceLike\",", "powt_grad_mixed(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs [GX, GY] =", "np.random.seed(101) #operator def powt_op(X, Y): return [np.power(X, Y)] #two gradients", "Y[:, np.newaxis, np.newaxis, np.newaxis]]) return ([GX, np.sum(np.sum(np.sum(GY, 3), 2), 1)])", "latter array increased by one dim def powt_op_axis0(X, Y): return", "np.random.rand(2).astype(np.float32) + 2.0 #pow op with the latter array increased", "workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X + Y[:,", "out, X + Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) #", "np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(2, 3).astype(np.float16) op =", "Y): return powt_op(X, Y[:, :, np.newaxis]) #two gradients Y*X^(Y-1) and", "#operator def powt_op(X, Y): return [np.power(X, Y)] #two gradients Y*X^(Y-1)", "X = np.random.rand(1).astype(np.float32) Y = np.random.rand(1).astype(np.float32).reshape([]) op = core.CreateOperator(\"Add\", [\"X\",", "self.assertDeviceChecks(dc, op, [X, Y], [0]) # NHWC X = np.random.rand(2,", "gpu support\") @given(**hu.gcs_gpu_only) def test_sum_reduce_fp16(self, gc, dc): # Set broadcast", "Y = np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", 
\"Y\"],", "[\"X\", \"Y\"], \"out\", broadcast=1, device_option=gc) def ref_op(X, Y): res =", "shape=[2, 0]) net.SumReduceLike([\"X\", \"Y\"], \"out\", axis=0) workspace.RunNetOnce(net) @given(**hu.gcs) def test_sum_reduce(self,", "np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0 Y = np.random.rand(1, 4,", "from caffe2.proto import caffe2_pb2 from caffe2.python import core, workspace import", "= workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X + Y[:, np.newaxis, np.newaxis, np.newaxis])", "5).astype(np.float32) + 1.0 Y = np.random.rand(2).astype(np.float32) + 2.0 #pow op", "import division from __future__ import print_function from __future__ import unicode_literals", "core.DeviceScope(gc): net.GivenTensorFill([], [\"X\"], values=[], shape=[2, 0, 5]) net.GivenTensorFill([], [\"Y\"], values=[],", "increased by one dim def powt_op_axis1(X, Y): return powt_op(X, Y[:,", "grad_reference=powt_grad_broadcast) #2. broadcasting intermediate dimensions X = np.random.rand(2, 3, 4,", "outputs, fwd_inputs): [X, Y] = fwd_inputs [GX, GY] = powt_grad(g_out,", "output_to_grad=\"Z\", grad_reference=powt_grad_axis1) #3. 
broadcasting the first dimension X = np.random.rand(2,", "Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1,", "4, 5).astype(np.float32) Y = np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Add\",", "= np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(5).astype(np.float32) op =", "* ln(X) #latter gradient is sumed over 3 and 0", "\"Y\"], \"out\", broadcast=1, device_option=gc) def ref_op(X, Y): res = np.sum(X,", "\"out\", axis=0) workspace.RunNetOnce(net) @given(**hu.gcs) def test_sum_reduce(self, gc, dc): # Set", "res) self.assertDeviceChecks(dc, op, [X, Y], [0]) # fp64 is not", "3, 4, 5).astype(np.float32) Y = np.random.rand(5).astype(np.float32) op = core.CreateOperator( \"Add\",", "= np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1,", "[0]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) # broadcasting the", "device_option=gc) def ref_op(X, Y): res = np.sum(X, axis=0) res =", "#latter gradient is sumed over 0 and 1 dims to", "test_semantic_broadcast(self, gc, dc): # NCHW as default X = np.random.rand(2,", "broadcast=1, axis_str=\"C\", order=\"NHWC\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out =", "np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(4, 5).astype(np.float16) op =", "workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res = np.array(np.sum(X)) np.testing.assert_array_almost_equal(out,", "= np.random.rand(1, 3, 4, 1).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\",", "Y): return powt_op(X, Y[:, np.newaxis, np.newaxis, np.newaxis]) #two gradients Y*X^(Y-1)", "and 0 dims to account for broadcast def powt_grad_broadcast(g_out, outputs,", "Y[:, :, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc, op,", "broadcasting intermediate 
dimensions X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y", "4, 5).astype(np.float32) Y = np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Mul\",", "Y): res = np.sum(X, axis=3) res = np.sum(res, axis=2) return", "Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\",", "device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) # broadcasting with single", "3, 4, 5).astype(np.float32) Y = np.random.rand(1, 4, 1).astype(np.float32) op =", "np.testing.assert_array_almost_equal( out, X + Y[:, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op, [X,", "np.testing.assert_array_almost_equal(out, X + Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc,", "\"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) def ref_op(X, Y): res", "powt_grad(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs Z = outputs[0]", "axis=0) res = np.sum(res, axis=2) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X,", "= core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) def ref_op(X,", "no axis, i.e. broadcasting last dimensions. X = np.random.rand(2, 3,", "net.GivenTensorFill([], [\"Y\"], values=[], shape=[2, 0]) net.SumReduceLike([\"X\", \"Y\"], \"out\", axis=0) workspace.RunNetOnce(net)", "dc): # broadcasting constant X = np.random.rand(2, 3, 4, 5).astype(np.float32)", "ref_op(X, Y): res = np.sum(X, axis=0) res = np.sum(res, axis=2)", "3, 4, 5).astype(np.float32) + 1.0 Y = np.random.rand(4, 5).astype(np.float32) +", "np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0]) # NHWC X", "= np.sum(X, axis=3) res = np.sum(res, axis=2) return [res] self.assertReferenceChecks(", "= np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op", "inputs=[X, Y], reference=powt_op_axis0, output_to_grad=\"Z\", grad_reference=powt_grad_axis0) #4. 
broadcasting with single elem", "broadcast=1) def ref_op(X, Y): res = np.sum(X, axis=0) res =", "values=[], shape=[2, 0, 5]) net.GivenTensorFill([], [\"Y\"], values=[], shape=[2, 0]) net.SumReduceLike([\"X\",", "[X, Y], [0]) # fp64 is not supported with the", "increased by one dim def powt_op_mixed(X, Y): return powt_op(X, Y[np.newaxis,", "over 3, 2 and 1 dims to account for broadcast", "dimension X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(2).astype(np.float32)", "4, 5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"],", "np.newaxis, np.newaxis, np.newaxis]]) return ([GX, np.sum(np.sum(np.sum(GY, 3), 2), 1)]) op", "4, 500).astype(np.float64) Y = np.random.rand(1).astype(np.float64) op = core.CreateOperator( \"SumReduceLike\", [\"X\",", "return [res] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) #", "Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) @given(**hu.gcs) def test_semantic_broadcast(self, gc,", "= core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1, axis=0) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X,", "res = np.sum(X, axis=3) res = np.sum(res, axis=2) return [res]", "\"out\", broadcast=1, axis_str=\"C\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out =", "and 1 dims to account for broadcast def powt_grad_axis0(g_out, outputs,", "np.testing.assert_array_almost_equal(out, X + Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) @given(**hu.gcs)", "intermediate dimensions X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y =", "[X, Y], 1, [0]) @given(**hu.gcs) def test_broadcast_Mul(self, gc, dc): #", "def powt_grad_axis0(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs [GX, GY]", "+ 2.0 #two gradients Y*X^(Y-1) and X^Y * ln(X) #latter", "axis=2).reshape(Y.shape) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y], 
[0]) # fp64", "1)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1, axis=0) self.assertReferenceChecks(device_option=gc,", "= np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0)", "workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res = np.sum(X, axis=0) res =", "out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X - Y) self.assertDeviceChecks(dc, op, [X,", "ends X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(1,", "caffe2.python.serialized_test.serialized_test_util as serial # TODO(jiayq): make them hypothesis tests for", "3, 4, 5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Add\",", "not supported with the CUDA op dc_cpu_only = [d for", "res) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting intermediate dimensions", "= fwd_inputs [GX, GY] = powt_grad(g_out, outputs, [X, Y[np.newaxis, :,", "np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(1).astype(np.float32) op = core.CreateOperator(\"Add\",", "4, 1).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1)", "caffe2.python import core, workspace import caffe2.python.hypothesis_test_util as hu import caffe2.python.serialized_test.serialized_test_util", "ln(X) #latter gradient is sumed over 3 and 0 dims", "Y[:, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0]) # NHWC", "dimensions. 
X = np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0 Y", "np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1,", "op, [X, Y], 1, [0]) @given(**hu.gcs) def test_broadcast_Sub(self, gc, dc):", "np.testing.assert_array_almost_equal( out, X * Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertGradientChecks(gc, op,", "0]) net.SumReduceLike([\"X\", \"Y\"], \"out\", axis=0) workspace.RunNetOnce(net) @given(**hu.gcs) def test_sum_reduce(self, gc,", "- Y[:, :, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y], 1, [0])", "X^Y * ln(X) def powt_grad(g_out, outputs, fwd_inputs): [X, Y] =", "#latter gradient is sumed over 3 and 0 dims to", "self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_axis1, output_to_grad=\"Z\", grad_reference=powt_grad_axis1) #3. broadcasting the", "[0]) # fp64 is not supported with the CUDA op", ":, np.newaxis]]) return ([GX, np.sum(np.sum(GY, 3), 0)]) op = core.CreateOperator(\"Pow\",", "is sumed over 0 and 1 dims to account for", "[0]) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting the first", "Y[np.newaxis, :, :, :]]) return ([GX, np.reshape(np.sum(np.sum(np.sum(GY, 3), 1), 0),", "Y], [0]) @given(**hu.gcs) def test_sum_reduce_empty_blob(self, gc, dc): net = core.Net('test')", "4, 1))]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1, axis=1)", "np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1,", "[0]) # NHWC X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y", "= np.random.rand(4, 5).astype(np.float32) + 2.0 #two gradients Y*X^(Y-1) and X^Y", "Z * np.log(X)] * g_out) #1. 
Set broadcast and no", "# broadcasting intermediate dimensions X = np.random.rand(2, 3, 4, 5).astype(np.float16)", "3, 4, 1).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\",", "Y = np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"],", "threshold=1e-3) # broadcasting intermediate dimensions X = np.random.rand(2, 3, 4,", "= core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\",", "#2. broadcasting intermediate dimensions X = np.random.rand(2, 3, 4, 5).astype(np.float32)", "[0]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) @serial.given(**hu.gcs) def test_broadcast_powt(self,", "axis=0) res = np.sum(res, axis=2) return [res] self.assertReferenceChecks( device_option=gc, op=op,", "Y): res = np.sum(X, axis=0) res = np.sum(res, axis=2) return", "broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out,", "Y], [0]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) # broadcasting", "broadcast=1, device_option=gc) def ref_op(X, Y): res = np.sum(X, axis=0) res", "as default X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y =", "the CUDA op dc_cpu_only = [d for d in dc", "core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op)", "test_sum_reduce_fp16(self, gc, dc): # Set broadcast and no axis, i.e.", "res, decimal=3) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting intermediate", "0)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1) self.assertReferenceChecks(device_option=gc, op=op,", "* Y[:, :, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y], 1, [0])", "* g_out) #1. Set broadcast and no axis, i.e. 
broadcasting", "[0]) @given(**hu.gcs) def test_semantic_broadcast(self, gc, dc): # NCHW as default", "3, 4, 1).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\",", "np.random.rand(2, 3, 4, 500).astype(np.float64) Y = np.random.rand(1).astype(np.float64) op = core.CreateOperator(", "\"Y\"], \"out\", broadcast=1, axis=1) def ref_op(X, Y): res = np.sum(X,", "dc): net = core.Net('test') with core.DeviceScope(gc): net.GivenTensorFill([], [\"X\"], values=[], shape=[2,", "X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32)", "and no axis, i.e. broadcasting last dimensions. X = np.random.rand(2,", "X - Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y],", ":, np.newaxis]) #two gradients Y*X^(Y-1) and X^Y * ln(X) #latter", "test_broadcast_Add(self, gc, dc): # Set broadcast and no axis, i.e.", "Y = np.random.rand(2, 3).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"],", "workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X + Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc,", "intermediate dimensions X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y =", "grad_reference=powt_grad_axis1) #3. 
broadcasting the first dimension X = np.random.rand(2, 3,", "Y = np.random.rand(1, 4, 1).astype(np.float32) + 2.0 #pow op with", "np.random.rand(1, 3, 4, 1).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"],", "4, 1).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1)", "self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) if __name__ ==", "[0]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) # broadcasting with", "0), (1, 4, 1))]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\",", "import unicode_literals import unittest from hypothesis import given import numpy", "single elem dimensions at both ends X = np.random.rand(2, 3,", "TestElementwiseBroadcast(serial.SerializedTestCase): @given(**hu.gcs) def test_broadcast_Add(self, gc, dc): # Set broadcast and", "= np.sum(X, axis=0) res = np.sum(res, axis=2) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc,", "workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X + Y[:, :, np.newaxis]) self.assertDeviceChecks(dc, op, [X,", "5).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X)", "intermediate dimensions X = np.random.rand(2, 3, 4, 500).astype(np.float64) Y =", "4).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\",", "res = np.sum(res, axis=0) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y],", "= workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X - Y[:, :, np.newaxis]) self.assertGradientChecks(gc, op,", "Y], [0]) # Set broadcast and no axis, i.e. 
broadcasting", "by one dim def powt_op_axis1(X, Y): return powt_op(X, Y[:, :,", "op, [X, Y], [0]) # broadcasting the first dimension X", "4, 5).astype(np.float32) Y = np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Sub\",", "3, 4, 5).astype(np.float32) Y = np.random.rand(2, 3).astype(np.float32) op = core.CreateOperator(", "Y - 1), Z * np.log(X)] * g_out) #1. Set", "sumed over 3 and 0 dims to account for broadcast", "= workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X * Y) self.assertDeviceChecks(dc, op, [X, Y],", "#pow op with the latter array increased by one dim", "= core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X)", "out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X * Y) self.assertDeviceChecks(dc, op, [X,", "X * Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc, op,", "np.testing.assert_array_almost_equal( out, X + Y) self.assertDeviceChecks(dc, op, [X, Y], [0])", "reference=ref_op, threshold=1e-3) # Set broadcast and no axis, i.e. broadcasting", "3, 4, 5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Add\",", "op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) def", "self.assertGradientChecks(gc, op, [X, Y], 1, [0]) # broadcasting with single", "np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(5).astype(np.float32) op = core.CreateOperator(", "Y], reference=powt_op, output_to_grad=\"Z\", grad_reference=powt_grad_broadcast) #2. 
broadcasting intermediate dimensions X =", "Y = np.random.rand(4, 5).astype(np.float32) + 2.0 #two gradients Y*X^(Y-1) and", "workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X", "core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) def ref_op(X, Y):", "def test_broadcast_Add(self, gc, dc): # Set broadcast and no axis,", "unittest from hypothesis import given import numpy as np from", "axis_str=\"C\", order=\"NHWC\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\")", ":, :]]) return ([GX, np.reshape(np.sum(np.sum(np.sum(GY, 3), 1), 0), (1, 4,", "powt_grad(g_out, outputs, [X, Y[:, np.newaxis, np.newaxis, np.newaxis]]) return ([GX, np.sum(np.sum(np.sum(GY,", "unicode_literals import unittest from hypothesis import given import numpy as", "np from caffe2.proto import caffe2_pb2 from caffe2.python import core, workspace", "[X, Y], [0]) # broadcasting scalar X = np.random.rand(1).astype(np.float32) Y", "GY] = powt_grad(g_out, outputs, [X, Y[:, :, np.newaxis]]) return ([GX,", "dims to account for broadcast def powt_grad_broadcast(g_out, outputs, fwd_inputs): [GX,", "[X, Y[:, np.newaxis, np.newaxis, np.newaxis]]) return ([GX, np.sum(np.sum(np.sum(GY, 3), 2),", "dc_cpu_only = [d for d in dc if d.device_type !=", "= np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(4, 5).astype(np.float16) op", "[X, Y], 1, [0]) self.assertDeviceChecks(dc, op, [X, Y], [0]) #", "[\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op)", "op=op, inputs=[X, Y], reference=powt_op, output_to_grad=\"Z\", grad_reference=powt_grad_broadcast) #2. 
broadcasting intermediate dimensions", "out, X + Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op, [X,", "dimensions X = np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0 Y", "\"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "sumed over 0 and 1 dims to account for broadcast", "fwd_inputs [GX, GY] = powt_grad(g_out, outputs, [X, Y[np.newaxis, :, :,", "for broadcast def powt_grad_axis1(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs", "workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X - Y[:, :, np.newaxis]) self.assertGradientChecks(gc, op, [X,", "dimensions at both ends X = np.random.rand(2, 3, 4, 5).astype(np.float32)", "ln(X) #latter gradient is sumed over 1 and 0 dims", "axis=0) res = np.sum(res, axis=2).reshape(Y.shape) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X,", "[\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op)", "\"Y\"], \"Z\", broadcast=1, axis=0) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_axis0, output_to_grad=\"Z\",", "dim def powt_op_axis1(X, Y): return powt_op(X, Y[:, :, np.newaxis]) #two", "np.reshape(np.sum(np.sum(np.sum(GY, 3), 1), 0), (1, 4, 1))]) op = core.CreateOperator(\"Pow\",", "test_broadcast_scalar(self, gc, dc): # broadcasting constant X = np.random.rand(2, 3,", "one dim def powt_op_axis0(X, Y): return powt_op(X, Y[:, np.newaxis, np.newaxis,", "Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X - Y) self.assertDeviceChecks(dc,", "net.SumReduceLike([\"X\", \"Y\"], \"out\", axis=0) workspace.RunNetOnce(net) @given(**hu.gcs) def test_sum_reduce(self, gc, dc):", "axis=0) res = np.sum(res, axis=0) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X,", "Y) 
workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X - Y[:, :,", "axis, i.e. broadcasting last dimensions. X = np.random.rand(2, 3, 4,", "inputs=[X, Y], reference=ref_op, threshold=1e-3) # Set broadcast and no axis,", "Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X + Y[:, :,", "as hu import caffe2.python.serialized_test.serialized_test_util as serial # TODO(jiayq): make them", "0)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1, axis=1) self.assertReferenceChecks(device_option=gc,", "powt_op_mixed(X, Y): return powt_op(X, Y[np.newaxis, :, :, :]) #two gradients", "4, 5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\",", "axis=0) def ref_op(X, Y): res = np.sum(X, axis=3) res =", "Y], 1, [0]) @given(**hu.gcs) def test_broadcast_Mul(self, gc, dc): # Set", "([Y * np.power(X, Y - 1), Z * np.log(X)] *", "+ Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) @given(**hu.gcs) def test_sum_reduce_empty_blob(self,", "= core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\",", "np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\",", "self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op, output_to_grad=\"Z\", grad_reference=powt_grad_broadcast) #2. 
broadcasting intermediate", "* ln(X) def powt_grad(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs", "\"Y\"], \"out\", axis=0) workspace.RunNetOnce(net) @given(**hu.gcs) def test_sum_reduce(self, gc, dc): #", "5).astype(np.float32) Y = np.random.rand(2, 3).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\",", "= np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0)", "= workspace.FetchBlob(\"out\") res = np.sum(X, axis=0) res = np.sum(res, axis=2)", "op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) # Set broadcast and no", "5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"],", "self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_axis0, output_to_grad=\"Z\", grad_reference=powt_grad_axis0) #4. broadcasting with", "Y = np.random.rand(1).astype(np.float32).reshape([]) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1)", "\"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "dims to account for broadcast def powt_grad_axis1(g_out, outputs, fwd_inputs): [X,", "= workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X - Y) self.assertDeviceChecks(dc, op, [X, Y],", "#latter gradient is sumed over 1 and 0 dims to", "caffe2_pb2 from caffe2.python import core, workspace import caffe2.python.hypothesis_test_util as hu", "numpy as np from caffe2.proto import caffe2_pb2 from caffe2.python import", "dims to account for broadcast def powt_grad_mixed(g_out, outputs, fwd_inputs): [X,", "5).astype(np.float32) Y = np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\",", "\"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out =", "to account for broadcast def powt_grad_axis0(g_out, outputs, 
fwd_inputs): [X, Y]", "dim def powt_op_mixed(X, Y): return powt_op(X, Y[np.newaxis, :, :, :])", "workspace.FetchBlob(\"out\") res = np.array(np.sum(X)) np.testing.assert_array_almost_equal(out, res, decimal=0) # broadcasting with", "Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X * Y[:,", "3, 4, 5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\",", "broadcasting intermediate dimensions X = np.random.rand(2, 3, 4, 500).astype(np.float64) Y", "1, [0]) @given(**hu.gcs) def test_broadcast_Mul(self, gc, dc): # Set broadcast", "4, 5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\",", "X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X +", "out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X + Y) self.assertDeviceChecks(dc, op,", "from __future__ import absolute_import from __future__ import division from __future__", "5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"],", "res = np.sum(res, axis=2).reshape(Y.shape) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y],", "op, [X, Y], [0]) # NHWC X = np.random.rand(2, 3,", "one dim def powt_op_mixed(X, Y): return powt_op(X, Y[np.newaxis, :, :,", "dimensions X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(3,", "core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1, axis=0) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y],", "with the latter array increased by one dim def powt_op_mixed(X,", "powt_grad(g_out, outputs, [X, Y[np.newaxis, :, :, :]]) return ([GX, np.reshape(np.sum(np.sum(np.sum(GY,", "= core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1) def ref_op(X, Y):", 
"import numpy as np from caffe2.proto import caffe2_pb2 from caffe2.python", "\"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1) def ref_op(X, Y): res =", "core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op)", "broadcast=1, axis=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_mixed, output_to_grad=\"Z\", grad_reference=powt_grad_mixed) @given(**hu.gcs)", "shape=[2, 0, 5]) net.GivenTensorFill([], [\"Y\"], values=[], shape=[2, 0]) net.SumReduceLike([\"X\", \"Y\"],", "import caffe2.python.serialized_test.serialized_test_util as serial # TODO(jiayq): make them hypothesis tests", "4).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\",", "Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting scalar X", "[d for d in dc if d.device_type != caffe2_pb2.CUDA] self.assertDeviceChecks(dc_cpu_only,", "4, 1).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1)", "4, 5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\",", "broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res", "= [d for d in dc if d.device_type != caffe2_pb2.CUDA]", "op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X)", "res = np.sum(X, axis=0) res = np.sum(res, axis=2) return [res]", "core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "op=op, inputs=[X, Y], reference=powt_op_axis0, output_to_grad=\"Z\", grad_reference=powt_grad_axis0) #4. 
broadcasting with single", "- Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y], 1,", "res = np.sum(res, axis=2) return [res] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X,", "core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\",", "3, 4, 5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(", "and X^Y * ln(X) #latter gradient is sumed over 3,", "for d in dc if d.device_type != caffe2_pb2.CUDA] self.assertDeviceChecks(dc_cpu_only, op,", "res = np.sum(X, axis=0) res = np.sum(res, axis=2).reshape(Y.shape) np.testing.assert_array_almost_equal(out, res)", "op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X,", "[\"X\", \"Y\"], \"out\", broadcast=1, axis=0) def ref_op(X, Y): res =", "def powt_grad_broadcast(g_out, outputs, fwd_inputs): [GX, GY] = powt_grad(g_out, outputs, fwd_inputs)", "broadcasting scalar X = np.random.rand(1).astype(np.float32) Y = np.random.rand(1).astype(np.float32).reshape([]) op =", "constant X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(1).astype(np.float32)", "1, [0]) @given(**hu.gcs) def test_broadcast_Sub(self, gc, dc): # Set broadcast", "gc, dc): # Set broadcast and no axis, i.e. broadcasting", "* Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc, op, [X,", "def powt_op(X, Y): return [np.power(X, Y)] #two gradients Y*X^(Y-1) and", "op, [X, Y], 1, [0]) # broadcasting the first dimension", "#3. 
broadcasting the first dimension X = np.random.rand(2, 3, 4,", "test_sum_reduce(self, gc, dc): # Set broadcast and no axis, i.e.", "op, [X, Y], 1, [0]) # broadcasting intermediate dimensions X", "@given(**hu.gcs) def test_sum_reduce(self, gc, dc): # Set broadcast and no", "np.newaxis]]) return ([GX, np.sum(np.sum(GY, 3), 0)]) op = core.CreateOperator(\"Pow\", [\"X\",", "workspace.FetchBlob(\"out\") res = np.sum(X, axis=0) res = np.sum(res, axis=2) np.testing.assert_array_almost_equal(out,", "= workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X + Y[:, :, np.newaxis]) self.assertDeviceChecks(dc, op,", "res = np.array(np.sum(X)) np.testing.assert_array_almost_equal(out, res, decimal=0) # broadcasting with single", "np.sum(X, axis=3) res = np.sum(res, axis=2) np.testing.assert_array_almost_equal(out, res, decimal=3) self.assertDeviceChecks(dc,", "powt_op_axis0(X, Y): return powt_op(X, Y[:, np.newaxis, np.newaxis, np.newaxis]) #two gradients", "= powt_grad(g_out, outputs, fwd_inputs) return ([GX, np.sum(np.sum(GY, 1), 0)]) op", "reference=ref_op, threshold=1e-3) # broadcasting intermediate dimensions X = np.random.rand(2, 3,", "self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) # Set broadcast", "def test_sum_reduce_empty_blob(self, gc, dc): net = core.Net('test') with core.DeviceScope(gc): net.GivenTensorFill([],", "is sumed over 3 and 0 dims to account for", "3, 4, 5).astype(np.float16) Y = np.random.rand(3, 4).astype(np.float16) op = core.CreateOperator(", "outputs, [X, Y[np.newaxis, :, :, :]]) return ([GX, np.reshape(np.sum(np.sum(np.sum(GY, 3),", "= core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1, axis=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X,", "broadcast and no axis, i.e. broadcasting last dimensions. 
X =", "fwd_inputs): [X, Y] = fwd_inputs Z = outputs[0] return ([Y", "= workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X + Y) self.assertDeviceChecks(dc, op, [X,", "GY] = powt_grad(g_out, outputs, [X, Y[np.newaxis, :, :, :]]) return", "5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"],", "X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op", "X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(1, 3,", "#two gradients Y*X^(Y-1) and X^Y * ln(X) #latter gradient is", "np.sum(res, axis=0) return [res] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op,", "# broadcasting scalar X = np.random.rand(1).astype(np.float32) Y = np.random.rand(1).astype(np.float32).reshape([]) op", "5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"],", "better coverage. class TestElementwiseBroadcast(serial.SerializedTestCase): @given(**hu.gcs) def test_broadcast_Add(self, gc, dc): #", "broadcasting constant X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y =", "1).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\",", "np.newaxis, np.newaxis, np.newaxis]) #two gradients Y*X^(Y-1) and X^Y * ln(X)", "ln(X) def powt_grad(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs Z", "axis=2) np.testing.assert_array_almost_equal(out, res, decimal=3) self.assertDeviceChecks(dc, op, [X, Y], [0]) #", "at both ends X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y", "X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X *", "out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X * Y[:, np.newaxis, np.newaxis,", "and X^Y * ln(X) #latter gradient is sumed over 3", "out, X + Y) 
self.assertDeviceChecks(dc, op, [X, Y], [0]) @given(**hu.gcs)", "3), 2), 1)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1,", "self.assertGradientChecks(gc, op, [X, Y], 1, [0]) # broadcasting the first", "device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) if __name__ == \"__main__\":", "TODO(jiayq): make them hypothesis tests for better coverage. class TestElementwiseBroadcast(serial.SerializedTestCase):", "def powt_op_axis1(X, Y): return powt_op(X, Y[:, :, np.newaxis]) #two gradients", "device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) # broadcasting intermediate dimensions", "workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X - Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertGradientChecks(gc,", "gc, dc): net = core.Net('test') with core.DeviceScope(gc): net.GivenTensorFill([], [\"X\"], values=[],", "5).astype(np.float32) Y = np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\",", "values=[], shape=[2, 0]) net.SumReduceLike([\"X\", \"Y\"], \"out\", axis=0) workspace.RunNetOnce(net) @given(**hu.gcs) def", "# broadcasting the first dimension X = np.random.rand(2, 3, 4,", "* ln(X) #latter gradient is sumed over 1 and 0", "np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\",", "at both ends X = np.random.rand(2, 3, 4, 5).astype(np.float32) +", "5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\",", "self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc, op, [X, Y], 1,", "3, 4, 5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\",", "broadcast=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op, output_to_grad=\"Z\", grad_reference=powt_grad_broadcast) #2. 
broadcasting", "outputs, fwd_inputs): [GX, GY] = powt_grad(g_out, outputs, fwd_inputs) return ([GX,", "[X, Y], [0]) @unittest.skipIf(not workspace.has_gpu_support, \"No gpu support\") @given(**hu.gcs_gpu_only) def", "coverage. class TestElementwiseBroadcast(serial.SerializedTestCase): @given(**hu.gcs) def test_broadcast_Add(self, gc, dc): # Set", "= workspace.FetchBlob(\"out\") res = np.sum(X, axis=3) res = np.sum(res, axis=2)", "np.random.rand(4, 5).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1,", "from hypothesis import given import numpy as np from caffe2.proto", "5).astype(np.float32) + 1.0 Y = np.random.rand(1, 4, 1).astype(np.float32) + 2.0", "over 3 and 0 dims to account for broadcast def", "np.testing.assert_array_almost_equal( out, X + Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op,", "self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) # broadcasting with", "Y], [0]) # broadcasting the first dimension X = np.random.rand(2,", "X^Y * ln(X) #latter gradient is sumed over 1 and", "outputs, fwd_inputs): [X, Y] = fwd_inputs Z = outputs[0] return", "5).astype(np.float16) Y = np.random.rand(1, 3, 4, 1).astype(np.float16) op = core.CreateOperator(", "broadcast def powt_grad_mixed(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs [GX,", "Y], reference=powt_op_mixed, output_to_grad=\"Z\", grad_reference=powt_grad_mixed) @given(**hu.gcs) def test_broadcast_scalar(self, gc, dc): #", "= np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\",", "out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X - Y[:, :, np.newaxis]) self.assertGradientChecks(gc,", "[0]) # Set broadcast and no axis, i.e. 
broadcasting last", "op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X)", "4, 5).astype(np.float32) Y = np.random.rand(1, 3, 4, 1).astype(np.float32) op =", "grad_reference=powt_grad_axis0) #4. broadcasting with single elem dimensions at both ends", "1, [0]) # broadcasting intermediate dimensions X = np.random.rand(2, 3,", "Y = np.random.rand(1, 3, 4, 1).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\",", "500).astype(np.float64) Y = np.random.rand(1).astype(np.float64) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"],", "Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X + Y)", "= np.sum(res, axis=2) return [res] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y],", "op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) def", "out, X - Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertGradientChecks(gc, op, [X,", "4, 5).astype(np.float16) Y = np.random.rand(2, 3).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\",", "[\"X\", \"Y\"], \"out\", broadcast=1, axis_str=\"C\", order=\"NHWC\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "Y], [0]) @unittest.skipIf(not workspace.has_gpu_support, \"No gpu support\") @given(**hu.gcs_gpu_only) def test_sum_reduce_fp16(self,", "= np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0 Y = np.random.rand(2).astype(np.float32)", "the latter array increased by one dim def powt_op_axis1(X, Y):", "broadcasting the first dimension X = np.random.rand(2, 3, 4, 5).astype(np.float32)", "gradient is sumed over 0 and 1 dims to account", "np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc, op, [X, Y],", "4, 5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\",", "@given(**hu.gcs) def test_broadcast_Mul(self, gc, dc): # Set 
broadcast and no", "self.assertGradientChecks(gc, op, [X, Y], 1, [0]) # broadcasting intermediate dimensions", "array increased by one dim def powt_op_axis0(X, Y): return powt_op(X,", "np.random.rand(1).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X)", "op = core.CreateOperator( \"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis_str=\"C\") workspace.FeedBlob(\"X\",", "d.device_type != caffe2_pb2.CUDA] self.assertDeviceChecks(dc_cpu_only, op, [X, Y], [0]) @unittest.skipIf(not workspace.has_gpu_support,", "[X, Y], [0]) @given(**hu.gcs) def test_semantic_broadcast(self, gc, dc): # NCHW", "= fwd_inputs Z = outputs[0] return ([Y * np.power(X, Y", "Y], 1, [0]) # broadcasting with single elem dimensions at", "(1, 4, 1))]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1,", "[res.reshape(Y.shape)] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) if __name__", "workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X * Y) self.assertDeviceChecks(dc, op, [X, Y], [0])", "for broadcast def powt_grad_axis0(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs", "= np.random.rand(1).astype(np.float32).reshape([]) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\",", "4, 5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\",", "X * Y[:, :, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y], 1,", "X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(5).astype(np.float32) op", "last dimensions. 
X = np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0", "np.sum(X, axis=0) res = np.sum(res, axis=2) return [res] self.assertReferenceChecks( device_option=gc,", "return powt_op(X, Y[:, np.newaxis, np.newaxis, np.newaxis]) #two gradients Y*X^(Y-1) and", "hypothesis import given import numpy as np from caffe2.proto import", "np.testing.assert_array_almost_equal(out, X + Y[:, :, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y],", "outputs, fwd_inputs) return ([GX, np.sum(np.sum(GY, 1), 0)]) op = core.CreateOperator(\"Pow\",", "op, [X, Y], [0]) # broadcasting intermediate dimensions X =", "self.assertDeviceChecks(dc, op, [X, Y], [0]) # Set broadcast and no", "1), 0)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1) self.assertReferenceChecks(device_option=gc,", "= workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X - Y[:, np.newaxis, np.newaxis, np.newaxis])", "= np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(1, 3, 4,", "workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X + Y[:, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op,", "# broadcasting constant X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y", "3), 1), 0), (1, 4, 1))]) op = core.CreateOperator(\"Pow\", [\"X\",", "Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\",", "test_broadcast_powt(self, gc, dc): np.random.seed(101) #operator def powt_op(X, Y): return [np.power(X,", "+ 1.0 Y = np.random.rand(2).astype(np.float32) + 2.0 #pow op with", "workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res =", "([GX, np.sum(np.sum(GY, 3), 0)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\",", "return ([GX, np.sum(np.sum(GY, 1), 0)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"],", "dimensions. 
X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(2,", "X^Y * ln(X) #latter gradient is sumed over 3 and", "dc): np.random.seed(101) #operator def powt_op(X, Y): return [np.power(X, Y)] #two", "workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X + Y) self.assertDeviceChecks(dc,", "np.random.rand(4, 5).astype(np.float32) + 2.0 #two gradients Y*X^(Y-1) and X^Y *", "return powt_op(X, Y[np.newaxis, :, :, :]) #two gradients Y*X^(Y-1) and", "workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X * Y[:, :, np.newaxis]) self.assertGradientChecks(gc, op, [X,", "4, 5).astype(np.float32) + 1.0 Y = np.random.rand(4, 5).astype(np.float32) + 2.0", "Y = np.random.rand(3, 4).astype(np.float32) + 2.0 #pow op with the", "4, 5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\",", "np.testing.assert_array_almost_equal(out, X - Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc,", "axis=0) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_axis0, output_to_grad=\"Z\", grad_reference=powt_grad_axis0) #4. 
broadcasting", "#two gradients Y*X^(Y-1) and X^Y * ln(X) def powt_grad(g_out, outputs,", "op with the latter array increased by one dim def", "core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "3).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0)", "np.random.rand(5).astype(np.float32) op = core.CreateOperator( \"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis_str=\"C\",", "broadcast=1, axis=0) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\")", "to account for broadcast def powt_grad_mixed(g_out, outputs, fwd_inputs): [X, Y]", "Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc,", "1))]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1, axis=1) self.assertReferenceChecks(device_option=gc,", "Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X - Y[:,", "np.sum(res, axis=2) return [res] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op,", "[res] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) # Set", "workspace.RunNetOnce(net) @given(**hu.gcs) def test_sum_reduce(self, gc, dc): # Set broadcast and", "X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(2, 3).astype(np.float16)", "CUDA op dc_cpu_only = [d for d in dc if", "= np.sum(X, axis=0) res = np.sum(res, axis=0) return [res] self.assertReferenceChecks(", "broadcast=1, axis_str=\"C\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\")", "\"Y\"], \"out\", broadcast=1, axis_str=\"C\", order=\"NHWC\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) 
workspace.RunOperatorOnce(op)", "np.random.rand(3, 4).astype(np.float32) + 2.0 #pow op with the latter array", "= np.array(np.sum(X)) np.testing.assert_array_almost_equal(out, res, decimal=0) # broadcasting with single elem", "workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X * Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertGradientChecks(gc,", "last dimensions. X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y =", "and 0 dims to account for broadcast def powt_grad_axis1(g_out, outputs,", "np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1,", "np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1)", "gc, dc): # NCHW as default X = np.random.rand(2, 3,", "= np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\",", "X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(3, 4).astype(np.float16)", "workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res = np.sum(X, axis=3) res =", "out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X + Y[:, np.newaxis, np.newaxis])", "[X, Y], [0]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) @given(**hu.gcs)", "workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X + Y[:, :, np.newaxis])", "res = np.sum(res, axis=0) return [res] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X,", "np.newaxis, np.newaxis]]) return ([GX, np.sum(np.sum(np.sum(GY, 3), 2), 1)]) op =", "1.0 Y = np.random.rand(2).astype(np.float32) + 2.0 #pow op with the", "[X, Y], 1, [0]) @serial.given(**hu.gcs) def test_broadcast_powt(self, gc, dc): np.random.seed(101)", "print_function from __future__ import unicode_literals import unittest from hypothesis import", "np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(4, 
5).astype(np.float32) op =", "Y], [0]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) @given(**hu.gcs) def", "powt_op(X, Y[:, np.newaxis, np.newaxis, np.newaxis]) #two gradients Y*X^(Y-1) and X^Y", "np.sum(res, axis=2) return [res.reshape(Y.shape)] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op,", "return [res.reshape(Y.shape)] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) if", "[\"X\", \"Y\"], \"out\", broadcast=1, axis=1) def ref_op(X, Y): res =", "self.assertGradientChecks(gc, op, [X, Y], 1, [0]) self.assertDeviceChecks(dc, op, [X, Y],", "last dimensions. X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y =", "- 1), Z * np.log(X)] * g_out) #1. Set broadcast", "= powt_grad(g_out, outputs, [X, Y[np.newaxis, :, :, :]]) return ([GX,", "workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X * Y[:, :, np.newaxis])", "broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(", "over 0 and 1 dims to account for broadcast def", "Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y], 1, [0])", "= core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "[0]) # broadcasting with single elem dimensions at both ends", "powt_op(X, Y): return [np.power(X, Y)] #two gradients Y*X^(Y-1) and X^Y", "= np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(2, 3).astype(np.float16) op", "account for broadcast def powt_grad_broadcast(g_out, outputs, fwd_inputs): [GX, GY] =", "* Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y], 1,", "5).astype(np.float32) + 1.0 Y = np.random.rand(3, 4).astype(np.float32) + 2.0 #pow", "2 and 1 dims to account for broadcast def 
powt_grad_axis0(g_out,", "ln(X) #latter gradient is sumed over 3, 2 and 1", "X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X -", "axis_str=\"C\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(", "Y = np.random.rand(1).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1)", "core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X * Y) self.assertDeviceChecks(dc, op,", "[0]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) # broadcasting intermediate", "np.random.rand(3, 4).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1,", "3, 4, 5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Mul\",", "1 dims to account for broadcast def powt_grad_mixed(g_out, outputs, fwd_inputs):", "Y = np.random.rand(4, 5).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"],", "= np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1)", "\"Y\"], \"Z\", broadcast=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op, output_to_grad=\"Z\", grad_reference=powt_grad_broadcast)", "return ([GX, np.sum(np.sum(GY, 3), 0)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"],", "= np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\",", "account for broadcast def powt_grad_axis0(g_out, outputs, fwd_inputs): [X, Y] =", "is sumed over 3, 2 and 1 dims to account", "Y], [0]) # fp64 is not supported with the CUDA", 
"workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X * Y[:,", "core, workspace import caffe2.python.hypothesis_test_util as hu import caffe2.python.serialized_test.serialized_test_util as serial", "X - Y[:, :, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y], 1,", "\"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out", "3, 4, 5).astype(np.float32) + 1.0 Y = np.random.rand(3, 4).astype(np.float32) +", "3, 4, 5).astype(np.float16) Y = np.random.rand(4, 5).astype(np.float16) op = core.CreateOperator(", "for better coverage. class TestElementwiseBroadcast(serial.SerializedTestCase): @given(**hu.gcs) def test_broadcast_Add(self, gc, dc):", "3, 4, 5).astype(np.float32) + 1.0 Y = np.random.rand(2).astype(np.float32) + 2.0", "4, 5).astype(np.float32) Y = np.random.rand(2, 3).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\",", "@given(**hu.gcs) def test_broadcast_Sub(self, gc, dc): # Set broadcast and no", "by one dim def powt_op_axis0(X, Y): return powt_op(X, Y[:, np.newaxis,", "op, [X, Y], [0]) # broadcasting scalar X = np.random.rand(1).astype(np.float32)", "[0]) # broadcasting scalar X = np.random.rand(1).astype(np.float32) Y = np.random.rand(1).astype(np.float32).reshape([])", "np.sum(X, axis=0) res = np.sum(res, axis=2).reshape(Y.shape) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op,", "as serial # TODO(jiayq): make them hypothesis tests for better", "np.log(X)] * g_out) #1. Set broadcast and no axis, i.e.", "np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1,", "[X, Y], [0]) # broadcasting with single elem dimensions at", "inputs=[X, Y], reference=powt_op_axis1, output_to_grad=\"Z\", grad_reference=powt_grad_axis1) #3. 
broadcasting the first dimension", "\"out\", broadcast=1, axis=0) def ref_op(X, Y): res = np.sum(X, axis=3)", "workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X +", "= np.random.rand(4, 5).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\",", "2), 1)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1, axis=0)", "X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(2, 3).astype(np.float32)", "np.testing.assert_array_almost_equal(out, res, decimal=3) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting", "from __future__ import division from __future__ import print_function from __future__", "np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1)", "self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_mixed, output_to_grad=\"Z\", grad_reference=powt_grad_mixed) @given(**hu.gcs) def test_broadcast_scalar(self,", "np.random.rand(1).astype(np.float32).reshape([]) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X)", "intermediate dimensions X = np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0", "\"Z\", broadcast=1, axis=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_axis1, output_to_grad=\"Z\", grad_reference=powt_grad_axis1)", "gc, dc): # broadcasting constant X = np.random.rand(2, 3, 4,", "serial # TODO(jiayq): make them hypothesis tests for better coverage.", "out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X + Y[:, :, np.newaxis]) self.assertDeviceChecks(dc,", "= np.random.rand(5).astype(np.float32) op = core.CreateOperator( \"Add\", [\"X\", \"Y\"], \"out\", broadcast=1,", "= core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X) 
workspace.FeedBlob(\"Y\",", "axis=0) res = np.sum(res, axis=0) return [res] self.assertReferenceChecks( device_option=gc, op=op,", "op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1, axis=0) self.assertReferenceChecks(device_option=gc, op=op,", "gradients Y*X^(Y-1) and X^Y * ln(X) def powt_grad(g_out, outputs, fwd_inputs):", "np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\",", "np.newaxis]]) return ([GX, np.sum(np.sum(np.sum(GY, 3), 2), 1)]) op = core.CreateOperator(\"Pow\",", "@given(**hu.gcs) def test_semantic_broadcast(self, gc, dc): # NCHW as default X", "= np.random.rand(1).astype(np.float32) Y = np.random.rand(1).astype(np.float32).reshape([]) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"],", "broadcasting last dimensions. X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y", "dims to account for broadcast def powt_grad_axis0(g_out, outputs, fwd_inputs): [X,", "g_out) #1. Set broadcast and no axis, i.e. 
broadcasting last", "def powt_grad_mixed(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs [GX, GY]", "workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X * Y)", "inputs=[X, Y], reference=ref_op, threshold=1e-3) # broadcasting intermediate dimensions X =", "res = np.sum(res, axis=2) return [res.reshape(Y.shape)] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X,", "op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X)", "np.sum(X, axis=0) res = np.sum(res, axis=2) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op,", "= np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(1).astype(np.float32) op =", "[GX, GY] = powt_grad(g_out, outputs, [X, Y[:, np.newaxis, np.newaxis, np.newaxis]])", "inputs=[X, Y], reference=ref_op, threshold=1e-3) # broadcasting with single elem dimensions", "return ([GX, np.sum(np.sum(np.sum(GY, 3), 2), 1)]) op = core.CreateOperator(\"Pow\", [\"X\",", "GY] = powt_grad(g_out, outputs, fwd_inputs) return ([GX, np.sum(np.sum(GY, 1), 0)])", "= np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(2, 3).astype(np.float32) op", "grad_reference=powt_grad_mixed) @given(**hu.gcs) def test_broadcast_scalar(self, gc, dc): # broadcasting constant X", "outputs, [X, Y[:, :, np.newaxis]]) return ([GX, np.sum(np.sum(GY, 3), 0)])", "Y[:, :, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) self.assertDeviceChecks(dc,", "Y], [0]) # broadcasting intermediate dimensions X = np.random.rand(2, 3,", "4).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1)", "Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1,", "caffe2_pb2.CUDA] self.assertDeviceChecks(dc_cpu_only, op, [X, Y], [0]) @unittest.skipIf(not workspace.has_gpu_support, \"No 
gpu", "d in dc if d.device_type != caffe2_pb2.CUDA] self.assertDeviceChecks(dc_cpu_only, op, [X,", "= np.sum(res, axis=2) return [res.reshape(Y.shape)] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y],", "4, 5).astype(np.float32) + 1.0 Y = np.random.rand(3, 4).astype(np.float32) + 2.0", "return powt_op(X, Y[:, :, np.newaxis]) #two gradients Y*X^(Y-1) and X^Y", "X^Y * ln(X) #latter gradient is sumed over 3, 2", "at both ends X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y", "np.random.rand(1).astype(np.float64) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\",", "np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(2, 3).astype(np.float32) op =", "= np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(3).astype(np.float32) op =", "1, [0]) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting with", "@given(**hu.gcs) def test_sum_reduce_empty_blob(self, gc, dc): net = core.Net('test') with core.DeviceScope(gc):", "1, [0]) # broadcasting with single elem dimensions at both", "core.Net('test') with core.DeviceScope(gc): net.GivenTensorFill([], [\"X\"], values=[], shape=[2, 0, 5]) net.GivenTensorFill([],", "Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"],", "op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X)", "3, 4, 5).astype(np.float16) Y = np.random.rand(2, 3).astype(np.float16) op = core.CreateOperator(", "np.power(X, Y - 1), Z * np.log(X)] * g_out) #1.", "\"Y\"], \"out\", broadcast=1) def ref_op(X, Y): res = np.sum(X, axis=0)", "output_to_grad=\"Z\", grad_reference=powt_grad_axis0) #4. 
broadcasting with single elem dimensions at both", "\"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out =", "to account for broadcast def powt_grad_broadcast(g_out, outputs, fwd_inputs): [GX, GY]", "np.sum(res, axis=2).reshape(Y.shape) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y], [0]) #", "axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res", "and X^Y * ln(X) #latter gradient is sumed over 0", "\"Z\", broadcast=1, axis=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_mixed, output_to_grad=\"Z\", grad_reference=powt_grad_mixed)", "Y = np.random.rand(3, 4).astype(np.float16) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"],", "X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(1, 3,", "X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(4, 5).astype(np.float16)", "# broadcasting intermediate dimensions X = np.random.rand(2, 3, 4, 5).astype(np.float32)", "broadcasting intermediate dimensions X = np.random.rand(2, 3, 4, 5).astype(np.float32) +", "def ref_op(X, Y): res = np.sum(X, axis=3) res = np.sum(res,", "= workspace.FetchBlob(\"out\") res = np.sum(X, axis=0) res = np.sum(res, axis=0)", "axis=0) res = np.sum(res, axis=2) return [res.reshape(Y.shape)] self.assertReferenceChecks( device_option=gc, op=op,", "= core.CreateOperator( \"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis_str=\"C\", order=\"NHWC\") workspace.FeedBlob(\"X\",", "+ Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting scalar", "self.assertDeviceChecks(dc_cpu_only, op, [X, Y], [0]) @unittest.skipIf(not workspace.has_gpu_support, \"No gpu support\")", "4, 5).astype(np.float32) Y = np.random.rand(5).astype(np.float32) op = core.CreateOperator( \"Add\", [\"X\",", "np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0 
Y = np.random.rand(3, 4).astype(np.float32)", "+ Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc, op, [X,", "np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc, op, [X,", "return ([Y * np.power(X, Y - 1), Z * np.log(X)]", "5).astype(np.float32) + 2.0 #two gradients Y*X^(Y-1) and X^Y * ln(X)", "op dc_cpu_only = [d for d in dc if d.device_type", "= np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0)", "np.newaxis, np.newaxis, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) self.assertDeviceChecks(dc,", "workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X + Y) self.assertDeviceChecks(dc, op, [X, Y],", "import print_function from __future__ import unicode_literals import unittest from hypothesis", "0 and 1 dims to account for broadcast def powt_grad_mixed(g_out,", "2.0 #two gradients Y*X^(Y-1) and X^Y * ln(X) #latter gradient", "axis=2) return [res] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3)", "np.sum(res, axis=2) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y], [0]) #", "core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "5).astype(np.float32) Y = np.random.rand(1).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\",", "op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\",", "self.assertDeviceChecks(dc, op, [X, Y], [0]) # fp64 is not supported", "4, 5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"],", "Y], [0]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) @serial.given(**hu.gcs) def", "op, [X, Y], 1, [0]) @serial.given(**hu.gcs) def 
test_broadcast_powt(self, gc, dc):", "Y[:, :, np.newaxis]) #two gradients Y*X^(Y-1) and X^Y * ln(X)", "<reponame>YevhenVieskov/ML-DL-in-production from __future__ import absolute_import from __future__ import division from", "= np.sum(X, axis=3) res = np.sum(res, axis=2) np.testing.assert_array_almost_equal(out, res, decimal=3)", "self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) # broadcasting intermediate", "powt_op(X, Y[:, :, np.newaxis]) #two gradients Y*X^(Y-1) and X^Y *", "device_option=gc, op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) # Set broadcast and", "= np.random.rand(1).astype(np.float64) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1)", "Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\",", "def test_sum_reduce(self, gc, dc): # Set broadcast and no axis,", "np.testing.assert_array_almost_equal(out, res, decimal=0) # broadcasting with single elem dimensions at", "Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X + Y[:,", "workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X * Y[:, np.newaxis,", "ref_op(X, Y): res = np.sum(X, axis=3) res = np.sum(res, axis=2)", "[0]) @given(**hu.gcs) def test_broadcast_Mul(self, gc, dc): # Set broadcast and", "\"No gpu support\") @given(**hu.gcs_gpu_only) def test_sum_reduce_fp16(self, gc, dc): # Set", "5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"],", "X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res = np.array(np.sum(X))", "np.random.rand(2, 3, 4, 5).astype(np.float16) Y = np.random.rand(1, 3, 4, 1).astype(np.float16)", "test_sum_reduce_empty_blob(self, gc, dc): net = core.Net('test') with core.DeviceScope(gc): net.GivenTensorFill([], 
[\"X\"],", "[X, Y] = fwd_inputs Z = outputs[0] return ([Y *", "broadcasting with single elem dimensions at both ends X =", "Y*X^(Y-1) and X^Y * ln(X) def powt_grad(g_out, outputs, fwd_inputs): [X,", "4, 5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\",", "dimensions X = np.random.rand(2, 3, 4, 500).astype(np.float64) Y = np.random.rand(1).astype(np.float64)", "X = np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0 Y =", "latter array increased by one dim def powt_op_mixed(X, Y): return", "3, 4, 5).astype(np.float32) Y = np.random.rand(1).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\",", "latter array increased by one dim def powt_op_axis1(X, Y): return", "= outputs[0] return ([Y * np.power(X, Y - 1), Z", "# Set broadcast and no axis, i.e. broadcasting last dimensions.", "axis=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_axis1, output_to_grad=\"Z\", grad_reference=powt_grad_axis1) #3. 
broadcasting", "[X, Y], [0]) # broadcasting intermediate dimensions X = np.random.rand(2,", "dimension X = np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0 Y", "= np.random.rand(2, 3).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\",", "elem dimensions at both ends X = np.random.rand(2, 3, 4,", "np.newaxis]) self.assertGradientChecks(gc, op, [X, Y], 1, [0]) self.assertDeviceChecks(dc, op, [X,", "5).astype(np.float32) + 1.0 Y = np.random.rand(4, 5).astype(np.float32) + 2.0 #two", "np.random.rand(1, 4, 1).astype(np.float32) + 2.0 #pow op with the latter", "workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X - Y) self.assertDeviceChecks(dc, op,", "fwd_inputs): [GX, GY] = powt_grad(g_out, outputs, fwd_inputs) return ([GX, np.sum(np.sum(GY,", "[GX, GY] = powt_grad(g_out, outputs, [X, Y[np.newaxis, :, :, :]])", "5).astype(np.float32) Y = np.random.rand(5).astype(np.float32) op = core.CreateOperator( \"Add\", [\"X\", \"Y\"],", "ln(X) #latter gradient is sumed over 0 and 1 dims", "self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting with single elem", "5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\",", "self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting scalar X =", "workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X -", "op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) # broadcasting with single elem", "op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) if __name__ == \"__main__\": unittest.main()", "1), Z * np.log(X)] * g_out) #1. 
Set broadcast and", "\"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\")", "out, X * Y[:, np.newaxis, np.newaxis, np.newaxis]) self.assertGradientChecks(gc, op, [X,", "array increased by one dim def powt_op_mixed(X, Y): return powt_op(X,", "Z = outputs[0] return ([Y * np.power(X, Y - 1),", "np.sum(np.sum(GY, 1), 0)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1)", "= np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(1, 3, 4,", "= np.sum(res, axis=2) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y], [0])", "Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") res = np.sum(X, axis=0) res", "([GX, np.sum(np.sum(GY, 1), 0)]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\",", "Y], reference=powt_op_axis0, output_to_grad=\"Z\", grad_reference=powt_grad_axis0) #4. broadcasting with single elem dimensions", "X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(3).astype(np.float32) op", "\"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out", "core.CreateOperator( \"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis_str=\"C\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\",", "inputs=[X, Y], reference=powt_op, output_to_grad=\"Z\", grad_reference=powt_grad_broadcast) #2. broadcasting intermediate dimensions X", "4, 5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\",", "np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0 Y = np.random.rand(4, 5).astype(np.float32)", "them hypothesis tests for better coverage. 
class TestElementwiseBroadcast(serial.SerializedTestCase): @given(**hu.gcs) def", "1, [0]) # broadcasting the first dimension X = np.random.rand(2,", "\"Y\"], \"Z\", broadcast=1, axis=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_mixed, output_to_grad=\"Z\",", "broadcast=1, axis=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\")", "net = core.Net('test') with core.DeviceScope(gc): net.GivenTensorFill([], [\"X\"], values=[], shape=[2, 0,", "__future__ import division from __future__ import print_function from __future__ import", "op, [X, Y], [0]) self.assertGradientChecks(gc, op, [X, Y], 1, [0])", "def powt_op_axis0(X, Y): return powt_op(X, Y[:, np.newaxis, np.newaxis, np.newaxis]) #two", "Y)] #two gradients Y*X^(Y-1) and X^Y * ln(X) def powt_grad(g_out,", "broadcast def powt_grad_broadcast(g_out, outputs, fwd_inputs): [GX, GY] = powt_grad(g_out, outputs,", "axis=3) res = np.sum(res, axis=2) np.testing.assert_array_almost_equal(out, res, decimal=3) self.assertDeviceChecks(dc, op,", "[X, Y], 1, [0]) # broadcasting intermediate dimensions X =", "Y[:, np.newaxis, np.newaxis, np.newaxis]) #two gradients Y*X^(Y-1) and X^Y *", "np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1)", "= workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X + Y) self.assertDeviceChecks(dc, op, [X, Y],", "out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X - Y[:, np.newaxis, np.newaxis,", "inputs=[X, Y], reference=powt_op_mixed, output_to_grad=\"Z\", grad_reference=powt_grad_mixed) @given(**hu.gcs) def test_broadcast_scalar(self, gc, dc):", "1, [0]) self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting the", "np.sum(X, axis=0) res = np.sum(res, axis=2) return [res.reshape(Y.shape)] self.assertReferenceChecks( device_option=gc,", "= np.random.rand(3, 
4).astype(np.float32) op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1,", "[\"X\", \"Y\"], \"Z\", broadcast=1, axis=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op_axis1,", "5).astype(np.float32) Y = np.random.rand(2).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\",", "first dimension X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y =", "dc): # Set broadcast and no axis, i.e. broadcasting last", "\"Y\"], \"out\", broadcast=1, axis_str=\"C\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out", "core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) def ref_op(X, Y):", "= core.CreateOperator( \"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis_str=\"C\") workspace.FeedBlob(\"X\", X)", "* ln(X) #latter gradient is sumed over 0 and 1", "[0]) # broadcasting the first dimension X = np.random.rand(2, 3,", "__future__ import absolute_import from __future__ import division from __future__ import", "\"Z\", broadcast=1) self.assertReferenceChecks(device_option=gc, op=op, inputs=[X, Y], reference=powt_op, output_to_grad=\"Z\", grad_reference=powt_grad_broadcast) #2.", "[\"X\"], values=[], shape=[2, 0, 5]) net.GivenTensorFill([], [\"Y\"], values=[], shape=[2, 0])", "reference=powt_op, output_to_grad=\"Z\", grad_reference=powt_grad_broadcast) #2. 
broadcasting intermediate dimensions X = np.random.rand(2,", "3, 4, 5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(", "workspace.FetchBlob(\"out\") res = np.sum(X, axis=0) res = np.sum(res, axis=2).reshape(Y.shape) np.testing.assert_array_almost_equal(out,", "\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1, axis_str=\"C\", order=\"NHWC\") workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\",", "3, 4, 5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Mul\",", "4, 5).astype(np.float32) Y = np.random.rand(1).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"],", "= core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) def ref_op(X,", "= np.sum(X, axis=0) res = np.sum(res, axis=2) return [res] self.assertReferenceChecks(", "op, [X, Y], [0]) # broadcasting with single elem dimensions", "res = np.sum(X, axis=0) res = np.sum(res, axis=0) np.testing.assert_array_almost_equal(out, res)", "4, 1).astype(np.float32) + 2.0 #pow op with the latter array", "scalar X = np.random.rand(1).astype(np.float32) Y = np.random.rand(1).astype(np.float32).reshape([]) op = core.CreateOperator(\"Add\",", "array increased by one dim def powt_op_axis1(X, Y): return powt_op(X,", "= np.random.rand(1, 4, 1).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\",", "+ 1.0 Y = np.random.rand(3, 4).astype(np.float32) + 2.0 #pow op", "= powt_grad(g_out, outputs, [X, Y[:, :, np.newaxis]]) return ([GX, np.sum(np.sum(GY,", "= np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1)", "test_broadcast_Sub(self, gc, dc): # Set broadcast and no axis, i.e.", "= core.Net('test') with core.DeviceScope(gc): net.GivenTensorFill([], [\"X\"], values=[], shape=[2, 0, 5])", "the first dimension X = np.random.rand(2, 3, 4, 5).astype(np.float32) +", "* ln(X) #latter gradient is sumed over 3, 2 and", "X^Y * ln(X) 
#latter gradient is sumed over 0 and", "\"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out =", "1).astype(np.float32) + 2.0 #pow op with the latter array increased", "= np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1)", "5]) net.GivenTensorFill([], [\"Y\"], values=[], shape=[2, 0]) net.SumReduceLike([\"X\", \"Y\"], \"out\", axis=0)", "reference=powt_op_axis1, output_to_grad=\"Z\", grad_reference=powt_grad_axis1) #3. broadcasting the first dimension X =", "1.0 Y = np.random.rand(1, 4, 1).astype(np.float32) + 2.0 #pow op", "self.assertDeviceChecks(dc, op, [X, Y], [0]) @given(**hu.gcs) def test_sum_reduce_empty_blob(self, gc, dc):", "account for broadcast def powt_grad_axis1(g_out, outputs, fwd_inputs): [X, Y] =", "powt_grad_axis0(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs [GX, GY] =", "out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X + Y[:, np.newaxis, np.newaxis,", "op = core.CreateOperator(\"Mul\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X)", "op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\",", "powt_op_axis1(X, Y): return powt_op(X, Y[:, :, np.newaxis]) #two gradients Y*X^(Y-1)", "the latter array increased by one dim def powt_op_axis0(X, Y):", "caffe2.python.hypothesis_test_util as hu import caffe2.python.serialized_test.serialized_test_util as serial # TODO(jiayq): make", "= workspace.FetchBlob(\"out\") res = np.sum(X, axis=0) res = np.sum(res, axis=2).reshape(Y.shape)", "self.assertDeviceChecks(dc, op, [X, Y], [0]) # broadcasting intermediate dimensions X", "= np.sum(res, axis=0) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y], [0])", "3 and 0 dims to account for broadcast def powt_grad_axis1(g_out,", "import caffe2_pb2 from caffe2.python import 
core, workspace import caffe2.python.hypothesis_test_util as", "# NHWC X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y =", "np.testing.assert_array_almost_equal(out, X - Y[:, :, np.newaxis]) self.assertGradientChecks(gc, op, [X, Y],", "def powt_grad(g_out, outputs, fwd_inputs): [X, Y] = fwd_inputs Z =", "return [np.power(X, Y)] #two gradients Y*X^(Y-1) and X^Y * ln(X)", "X) workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X", "first dimension X = np.random.rand(2, 3, 4, 5).astype(np.float32) + 1.0", "core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\", broadcast=1, axis=0) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\", Y)", "+ Y[:, np.newaxis, np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0]) #", "Y], 1, [0]) # broadcasting intermediate dimensions X = np.random.rand(2,", "op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X) workspace.FeedBlob(\"Y\",", "op=op, inputs=[X, Y], reference=ref_op, threshold=1e-3) # broadcasting intermediate dimensions X", "i.e. broadcasting last dimensions. X = np.random.rand(2, 3, 4, 5).astype(np.float16)", "3, 4, 5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator(\"Sub\",", "by one dim def powt_op_mixed(X, Y): return powt_op(X, Y[np.newaxis, :,", "gradient is sumed over 3 and 0 dims to account", "broadcasting last dimensions. 
X = np.random.rand(2, 3, 4, 5).astype(np.float16) Y", "fwd_inputs) return ([GX, np.sum(np.sum(GY, 1), 0)]) op = core.CreateOperator(\"Pow\", [\"X\",", "# broadcasting intermediate dimensions X = np.random.rand(2, 3, 4, 500).astype(np.float64)", "NHWC X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(5).astype(np.float32)", "op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"], \"Z\", broadcast=1, axis=1) self.assertReferenceChecks(device_option=gc, op=op,", "the first dimension X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y", "np.testing.assert_array_almost_equal(out, X * Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc,", "np.newaxis]) self.assertDeviceChecks(dc, op, [X, Y], [0]) # NHWC X =", "+ 1.0 Y = np.random.rand(1, 4, 1).astype(np.float32) + 2.0 #pow", "workspace.FeedBlob(\"Y\", Y) workspace.RunOperatorOnce(op) out = workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal(out, X + Y)", "sumed over 3, 2 and 1 dims to account for", "from caffe2.python import core, workspace import caffe2.python.hypothesis_test_util as hu import", "dimensions. 
X = np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(4,", "np.random.rand(2, 3, 4, 5).astype(np.float32) Y = np.random.rand(3, 4).astype(np.float32) op =", "is not supported with the CUDA op dc_cpu_only = [d", "Y = np.random.rand(1).astype(np.float64) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\",", "Y*X^(Y-1) and X^Y * ln(X) #latter gradient is sumed over", "[0]) # broadcasting intermediate dimensions X = np.random.rand(2, 3, 4,", "# NCHW as default X = np.random.rand(2, 3, 4, 5).astype(np.float32)", "and 1 dims to account for broadcast def powt_grad_mixed(g_out, outputs,", "5).astype(np.float32) op = core.CreateOperator(\"Add\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\", X)", "Y = np.random.rand(3, 4).astype(np.float32) op = core.CreateOperator(\"Sub\", [\"X\", \"Y\"], \"out\",", "= np.sum(res, axis=2) np.testing.assert_array_almost_equal(out, res, decimal=3) self.assertDeviceChecks(dc, op, [X, Y],", "import unittest from hypothesis import given import numpy as np", "* np.log(X)] * g_out) #1. Set broadcast and no axis,", "[0]) @serial.given(**hu.gcs) def test_broadcast_powt(self, gc, dc): np.random.seed(101) #operator def powt_op(X,", "= np.sum(res, axis=0) return [res] self.assertReferenceChecks( device_option=gc, op=op, inputs=[X, Y],", "#4. 
broadcasting with single elem dimensions at both ends X", "X - Y) self.assertDeviceChecks(dc, op, [X, Y], [0]) self.assertGradientChecks(gc, op,", "3, 4, 5).astype(np.float32) + 1.0 Y = np.random.rand(1, 4, 1).astype(np.float32)", "gradients Y*X^(Y-1) and X^Y * ln(X) #latter gradient is sumed", "in dc if d.device_type != caffe2_pb2.CUDA] self.assertDeviceChecks(dc_cpu_only, op, [X, Y],", "5).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\", \"Y\"], \"out\", broadcast=1) workspace.FeedBlob(\"X\",", "5).astype(np.float32) Y = np.random.rand(4, 5).astype(np.float32) op = core.CreateOperator( \"SumReduceLike\", [\"X\",", "gradient is sumed over 3, 2 and 1 dims to", "1), 0), (1, 4, 1))]) op = core.CreateOperator(\"Pow\", [\"X\", \"Y\"],", "axis=0) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y], [0]) # Set", "axis=1) def ref_op(X, Y): res = np.sum(X, axis=0) res =", "= workspace.FetchBlob(\"out\") np.testing.assert_array_almost_equal( out, X * Y[:, np.newaxis, np.newaxis, np.newaxis])", "res = np.sum(res, axis=2) np.testing.assert_array_almost_equal(out, res) self.assertDeviceChecks(dc, op, [X, Y],", "2.0 #pow op with the latter array increased by one" ]
[ "mock_dump.return_value = \"stack\" result = command.run(parsed_args) self.assertEqual(0, result) expected_calls =", "governing permissions and limitations # under the License. import unittest", "\"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", } } result = command.run(parsed_args) self.assertEqual(0, result)", "[ mock.call( mock.ANY, \"deploy-bifrost\", ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\")", "= [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-manage.yml\", ], ), ] self.assertEqual(expected_calls,", "\"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump):", "[\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(utils, \"galaxy_install\", spec=True)", "mock from kayobe.cli import commands from kayobe import utils class", "__init__(self): super(TestApp, self).__init__( description='Test app', version='0.1', command_manager=cliff.commandmanager.CommandManager('kayobe.cli')) class TestCase(unittest.TestCase): @mock.patch.object(utils,", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_upgrade(self, mock_run): command =", "expected_calls = [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-provide.yml\", ], ), ]", "mock_dump.return_value = { \"controller0\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", }", "= [ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={\"ansible_user\": \"stack\"}, ), ] self.assertEqual(expected_calls,", "\"stack\", } 
} result = command.run(parsed_args) self.assertEqual(0, result) expected_calls =", "\"virtualenv\": \"/kolla/venv/bin/python\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "= commands.SeedHypervisorHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value", "\"ansible/ntp.yml\", \"ansible/seed-hypervisor-libvirt-host.yml\", ], limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "[ mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY, [\"ansible/kolla-bifrost.yml\"], ),", "\"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", \"ansible/ceph-block-devices.yml\", ], limit=\"overcloud\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "\"run_kayobe_playbooks\") def test_overcloud_host_upgrade(self, mock_run): command = commands.OvercloudHostUpgrade(TestApp(), []) parser =", "= [ mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", \"ansible/overcloud-docker-sdk-upgrade.yml\", \"ansible/overcloud-etc-hosts-fixup.yml\", ],", "\"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/disable-glean.yml\", \"ansible/disable-cloud-init.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"overcloud\", ), mock.call(", "mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_inspect(self, mock_run): command = commands.BaremetalComputeInspect(TestApp(), [])", "self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-manage.yml\", ],", "command = commands.SeedHypervisorHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = 
parser.parse_args([])", "\"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ntp.yml\", \"ansible/seed-hypervisor-libvirt-host.yml\", ], limit=\"seed-hypervisor\", ),", "command = commands.SeedContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([\"--push\",", "def test_seed_hypervisor_host_upgrade(self, mock_run): command = commands.SeedHypervisorHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\")", "mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", } ), ]", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "expected_calls = [ mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed-hypervisor\",", "), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_provide(self, mock_run): command", "command = commands.ControlHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_both_venvs(self,", "\"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", } ), ] self.assertEqual(expected_calls,", "specific language governing permissions and limitations # under the License.", "utils class TestApp(cliff.app.App): def __init__(self): super(TestApp, self).__init__( description='Test app', version='0.1',", "# not use this file 
except in compliance with the", "\"ansible/disable-glean.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"seed\", ), mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\",", "= commands.SeedServiceDeploy(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result", "commands.OvercloudHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result =", "mock_dump.call_args_list) expected_calls = [ mock.call( mock.ANY, [ \"ansible/ip-allocation.yml\", \"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\",", "[ \"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_sets\": ( \"{{ seed_container_image_sets", "mock.ANY, \"deploy-bifrost\", ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "\"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_regexes\": \"'^regex1$ ^regex2$'\", \"push_images\": True,", "in compliance with the License. 
You may obtain # a", "command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = { \"controller0\": { \"kayobe_ansible_user\":", "expected_calls = [ mock.call( mock.ANY, \"deploy-bifrost\", ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list)", "noqa 'ansible/provision-net.yml', 'ansible/overcloud-grafana-configure.yml' ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "tags=\"config\", ), mock.call( mock.ANY, [ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", ], limit=\"seed\",", "self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_service_deploy(self, mock_kolla_run, mock_run):", "result) expected_calls = [ mock.call(mock.ANY, host=\"seed-hypervisor\", var_name=\"kayobe_ansible_user\", tags=\"dump-config\") ] self.assertEqual(expected_calls,", "mock_dump.return_value = { \"controller0\": { \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", }", "result) expected_calls = [ mock.call(mock.ANY, [\"ansible/network-connectivity.yml\"]), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "You may obtain # a copy of the License at", "mock.call( mock.ANY, [ \"ansible/ip-allocation.yml\", \"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\",", "self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, hosts=\"overcloud\", tags=\"dump-config\") ] self.assertEqual(expected_calls,", "} ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, 
\"run_kayobe_playbooks\") def test_overcloud_host_upgrade(self, mock_run):", "self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kayobe_venv(self,", "{ \"seed\": { \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } } result", "[ 'ansible/overcloud-ipa-images.yml', 'ansible/overcloud-introspection-rules.yml', 'ansible/overcloud-introspection-rules-dell-lldp-workaround.yml', # noqa 'ansible/provision-net.yml', 'ansible/overcloud-grafana-configure.yml' ], ),", "mock_run): command = commands.NetworkConnectivityCheck(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "\"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump):", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kayobe_venv(self, mock_kolla_run, mock_run,", "\"/kolla/venv/bin/python\", } ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "[ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", \"ansible/overcloud-docker-sdk-upgrade.yml\", \"ansible/overcloud-etc-hosts-fixup.yml\", ], limit=\"overcloud\", ), ] self.assertEqual(expected_calls,", 
"\"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/disable-glean.yml\", \"ansible/disable-cloud-init.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"overcloud\",", "\"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump): command =", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "command = commands.SeedHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "\"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", } ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\")", "hosts=\"overcloud\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls = [ mock.call( mock.ANY,", "mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "commands.BaremetalComputeProvide(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result =", "import utils class TestApp(cliff.app.App): def __init__(self): super(TestApp, self).__init__( description='Test app',", "[\"ansible/network-connectivity.yml\"]), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_configure(self,", "test_overcloud_host_upgrade(self, mock_run): command = commands.OvercloudHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", 
"self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_post_configure(self, mock_run): command = commands.OvercloudPostConfigure(TestApp(),", "{ \"seed\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", } } result", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_configure(self, mock_run, mock_dump): command = commands.SeedHypervisorHostConfigure(TestApp(), [])", "under the License is distributed on an \"AS IS\" BASIS,", "parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = { \"seed\":", "expected_calls = [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-inspect.yml\", ], ), ]", "\"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ip-routing.yml\", \"ansible/snat.yml\",", "result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-inspect.yml\", ], ),", "command = commands.SeedContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "\"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/disable-glean.yml\", \"ansible/disable-cloud-init.yml\",", "[ mock.call( mock.ANY, [ \"ansible/baremetal-compute-manage.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [ mock.call( mock.ANY, \"bootstrap-servers\",", "mock.call( mock.ANY, [ \"ansible/baremetal-compute-inspect.yml\", ], ), ] 
self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "[\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY, [ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", ],", "\"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", } ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list)", "def test_control_host_bootstrap(self, mock_run, mock_install): command = commands.ControlHostBootstrap(TestApp(), []) parser =", "\"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ntp.yml\", \"ansible/seed-hypervisor-libvirt-host.yml\", ], limit=\"seed-hypervisor\", ), ]", "self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build_with_regex(self, mock_run): command = commands.OvercloudContainerImageBuild(TestApp(),", "\"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ip-routing.yml\", \"ansible/snat.yml\", \"ansible/disable-glean.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"seed\",", "\"/kolla/venv/bin/python\", } ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_host_upgrade(self,", "self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\", force=True) expected_calls = [ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]),", "the License. 
import unittest import cliff.app import cliff.commandmanager import mock", "} } result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [", "mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_host_upgrade(self, mock_run): command = commands.SeedHostUpgrade(TestApp(), [])", "\"controller0\": { \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } } result =", "} result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY,", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build(self, mock_run): command = commands.SeedContainerImageBuild(TestApp(), []) parser", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure(self, mock_kolla_run, mock_run,", "# Copyright (c) 2017 StackHPC Ltd. 
# # Licensed under", "\"virtualenv\": \"/kolla/venv/bin/python\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin,", "\"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure(self, mock_kolla_run, mock_run, mock_dump): command =", "[ mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed-hypervisor\", ), ]", "def test_baremetal_compute_provide(self, mock_run): command = commands.BaremetalComputeProvide(TestApp(), []) parser = command.get_parser(\"test\")", "app', version='0.1', command_manager=cliff.commandmanager.CommandManager('kayobe.cli')) class TestCase(unittest.TestCase): @mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "mock.call(mock.ANY, host=\"seed-hypervisor\", var_name=\"kayobe_ansible_user\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls = [", "this file except in compliance with the License. 
You may", "), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build(self, mock_run): command", "\"run_kayobe_playbooks\") def test_seed_container_image_build_with_regex(self, mock_run): command = commands.SeedContainerImageBuild(TestApp(), []) parser =", "mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_configure(self, mock_run, mock_dump): command", "\"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_regexes\": \"'^regex1$ ^regex2$'\", \"push_images\": True, }", "mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure(self, mock_kolla_run,", "] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_configure(self, mock_run,", "[ mock.call( mock.ANY, [ 
\"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", \"ansible/overcloud-docker-sdk-upgrade.yml\", \"ansible/overcloud-etc-hosts-fixup.yml\", ], limit=\"overcloud\",", "\"/usr/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", } ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list)", "software # distributed under the License is distributed on an", "(the \"License\"); you may # not use this file except", "result) expected_calls = [ mock.call(mock.ANY, hosts=\"overcloud\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list)", "TestCase(unittest.TestCase): @mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_control_host_bootstrap(self, mock_run, mock_install):", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_manage(self, mock_run): command = commands.BaremetalComputeManage(TestApp(), []) parser", "), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_inspect(self, mock_run): command", "StackHPC Ltd. 
# # Licensed under the Apache License, Version", "= command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, [\"ansible/network-connectivity.yml\"]), ]", "\"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list)", "command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, hosts=\"overcloud\", tags=\"dump-config\") ]", "self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_both_venvs(self,", "), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_service_deploy(self,", "self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build(self, mock_run): command = commands.OvercloudContainerImageBuild(TestApp(),", "{ \"controller0\": {\"kayobe_ansible_user\": \"stack\"} } result = command.run(parsed_args) self.assertEqual(0, result)", "[ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_host_upgrade(self, mock_run): command = commands.OvercloudHostUpgrade(TestApp(),", "command = commands.OvercloudContainerImageBuild(TestApp(), []) parser = 
command.get_parser(\"test\") parsed_args = parser.parse_args([\"--push\",", "True, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_post_configure(self,", "file except in compliance with the License. You may obtain", "commands.SeedServiceDeploy(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result =", "[ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(utils,", "commands from kayobe import utils class TestApp(cliff.app.App): def __init__(self): super(TestApp,", "= commands.SeedHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kolla_venv(self, mock_kolla_run, mock_run,", "mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_both_venvs(self, mock_kolla_run,", "OR CONDITIONS OF ANY KIND, either express or implied. 
See", "the specific language governing permissions and limitations # under the", "\"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\")", "\"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", } ), ] self.assertEqual(expected_calls,", "result) expected_calls = [ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\",", "mock.call( mock.ANY, [\"ansible/kolla-bifrost.yml\"], ), mock.call( mock.ANY, [ \"ansible/overcloud-host-image-workaround-resolv.yml\", \"ansible/seed-introspection-rules.yml\", \"ansible/dell-switch-bmp.yml\",", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_inspect(self, mock_run): command =", "\"seed\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } }", "under the Apache License, Version 2.0 (the \"License\"); you may", "command_manager=cliff.commandmanager.CommandManager('kayobe.cli')) class TestCase(unittest.TestCase): @mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_control_host_bootstrap(self,", "def test_seed_host_upgrade(self, mock_run): command = commands.SeedHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\")", "\"virtualenv\": \"/kolla/venv/bin/python\", } ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "\"ansible/lvm.yml\", ], limit=\"seed\", ), mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), 
mock.call(", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build_with_regex(self, mock_run): command =", "expected_calls = [ mock.call( mock.ANY, [ \"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ],", "\"ansible/roles\", force=True) expected_calls = [ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"),", "mock_install): command = commands.ControlHostBootstrap(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure(self, mock_kolla_run, mock_run,", "[ \"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_regexes\": \"'^regex1$ ^regex2$'\", \"push_images\":", "result) expected_calls = [ mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call(", "hosts=\"seed\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls = [ mock.call( mock.ANY,", "= command.get_parser(\"test\") parsed_args = parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0, result)", "= [ mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed-hypervisor\", ),", "False, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build_with_regex(self,", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", 
"\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "expected_calls = [ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\":", "mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_provide(self, mock_run): command = commands.BaremetalComputeProvide(TestApp(), [])", "Copyright (c) 2017 StackHPC Ltd. # # Licensed under the", "), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_upgrade(self, mock_run): command", "\"seed\": {\"kayobe_ansible_user\": \"stack\"} } result = command.run(parsed_args) self.assertEqual(0, result) expected_calls", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls = [ mock.call( mock.ANY, [ \"ansible/ip-allocation.yml\",", "\"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/disable-glean.yml\",", "mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", }", "), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [ mock.call( mock.ANY, \"deploy-bifrost\",", "to in writing, software # distributed under the License is", "command = commands.SeedHypervisorHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_host_upgrade(self, mock_run): command = 
commands.SeedHostUpgrade(TestApp(), []) parser", "def test_seed_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), []) parser", "], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_provide(self, mock_run):", "mock.call(mock.ANY, [\"ansible/network-connectivity.yml\"]), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "mock.call( mock.ANY, [ \"ansible/baremetal-compute-manage.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_provide(self, mock_run): command = commands.BaremetalComputeProvide(TestApp(),", "or agreed to in writing, software # distributed under the", "\"{{ seed_container_image_sets }}\"), \"push_images\": False, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "overcloud_container_image_sets }}\"), \"push_images\": False, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "required by applicable law or agreed to in writing, software", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_service_deploy(self, mock_kolla_run,", "\"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/disable-glean.yml\", \"ansible/disable-cloud-init.yml\", 
\"ansible/ntp.yml\", \"ansible/lvm.yml\", ],", "mock_dump): command = commands.OvercloudHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, [\"ansible/network-connectivity.yml\"]), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "mock.ANY, [ \"ansible/overcloud-host-image-workaround-resolv.yml\", \"ansible/seed-introspection-rules.yml\", \"ansible/dell-switch-bmp.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "class TestCase(unittest.TestCase): @mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_control_host_bootstrap(self, mock_run,", "= command.run(parsed_args) self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\", force=True) expected_calls = [", "} result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(", "mock.call( mock.ANY, [ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", \"ansible/ceph-block-devices.yml\", ], limit=\"overcloud\", ),", "\"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", }, ), ] self.assertEqual(expected_calls,", "parser.parse_args([]) mock_dump.return_value = { \"controller0\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\",", "parsed_args = parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\")", "\"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ntp.yml\", \"ansible/seed-hypervisor-libvirt-host.yml\", ], limit=\"seed-hypervisor\",", 
"self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_manage(self, mock_run): command = commands.BaremetalComputeManage(TestApp(),", "test_seed_hypervisor_host_configure(self, mock_run, mock_dump): command = commands.SeedHypervisorHostConfigure(TestApp(), []) parser = command.get_parser(\"test\")", "= { \"seed\": {\"kayobe_ansible_user\": \"stack\"} } result = command.run(parsed_args) self.assertEqual(0,", "\"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ip-routing.yml\", \"ansible/snat.yml\", \"ansible/disable-glean.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\",", "\"ansible_user\": \"stack\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin,", "command = commands.OvercloudContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "mock_dump.return_value = { \"seed\": {\"kayobe_ansible_user\": \"stack\"} } result = command.run(parsed_args)", "\"run_kayobe_playbooks\") def test_seed_hypervisor_host_configure(self, mock_run, mock_dump): command = commands.SeedHypervisorHostConfigure(TestApp(), []) parser", "Apache License, Version 2.0 (the \"License\"); you may # not", "parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0,", "expected_calls = [ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={\"ansible_user\": \"stack\"}, ), ]", "\"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", \"ansible/overcloud-docker-sdk-upgrade.yml\", \"ansible/overcloud-etc-hosts-fixup.yml\", ], limit=\"overcloud\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "= [ mock.call( 
mock.ANY, [ \"ansible/baremetal-compute-provide.yml\", ], ), ] self.assertEqual(expected_calls,", "\"ansible/docker.yml\", \"ansible/ceph-block-devices.yml\", ], limit=\"overcloud\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls =", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_manage(self, mock_run): command =", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build_with_regex(self, mock_run): command =", "\"push_images\": True, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin,", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_network_connectivity_check(self, mock_run): command = commands.NetworkConnectivityCheck(TestApp(), []) parser", "agreed to in writing, software # distributed under the License", "seed_container_image_sets }}\"), \"push_images\": False, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "test_overcloud_container_image_build_with_regex(self, mock_run): command = commands.OvercloudContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "[ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", \"ansible/ceph-block-devices.yml\", ], limit=\"overcloud\", ), ] self.assertEqual(expected_calls,", "mock_run): command = commands.BaremetalComputeInspect(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_upgrade(self, 
mock_run): command = commands.SeedHypervisorHostUpgrade(TestApp(),", "distributed under the License is distributed on an \"AS IS\"", "mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\",", "self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kolla_venv(self,", "mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build(self, mock_run): command = commands.SeedContainerImageBuild(TestApp(), [])", "self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\":", "mock.call( mock.ANY, [ \"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_regexes\": \"'^regex1$", "test_seed_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), []) parser =", "test_overcloud_host_configure(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(), []) parser =", "self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-provide.yml\", ],", "License, Version 2.0 (the \"License\"); you may # not use", "CONDITIONS OF ANY KIND, either express or implied. 
See the", "self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_inspect(self, mock_run): command = commands.BaremetalComputeInspect(TestApp(),", "unittest import cliff.app import cliff.commandmanager import mock from kayobe.cli import", "} ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin,", "self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-inspect.yml\", ],", "self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\",", "\"run_kayobe_playbooks\") def test_seed_host_upgrade(self, mock_run): command = commands.SeedHostUpgrade(TestApp(), []) parser =", "@mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(),", "), mock.call( mock.ANY, [ \"ansible/overcloud-host-image-workaround-resolv.yml\", \"ansible/seed-introspection-rules.yml\", \"ansible/dell-switch-bmp.yml\", ], ), ]", "result = command.run(parsed_args) self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\", force=True) expected_calls =", "], extra_vars={ \"container_image_regexes\": \"'^regex1$ ^regex2$'\", \"push_images\": True, } ), ]", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_upgrade(self, mock_run): command = commands.SeedHypervisorHostUpgrade(TestApp(), []) parser", "not use this file except in compliance with the License.", "[\"ansible/kolla-ansible.yml\"], tags=\"install\"), ] 
self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "\"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure(self, mock_kolla_run, mock_run, mock_dump):", "\"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/usr/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", } ),", "\"run_kayobe_playbooks\") def test_network_connectivity_check(self, mock_run): command = commands.NetworkConnectivityCheck(TestApp(), []) parser =", "command.run(parsed_args) self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\", force=True) expected_calls = [ mock.call(mock.ANY,", "mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build_with_regex(self, mock_run): command = commands.OvercloudContainerImageBuild(TestApp(), [])", "\"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "commands.SeedContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([\"--push\", \"^regex1$\", \"^regex2$\"])", "writing, software # distributed under the License is distributed on", "test_seed_host_configure(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), []) parser =", "False, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build_with_regex(self,", "extra_vars={ 
\"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list)", "mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\") expected_calls = [ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"),", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\") expected_calls = [ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY,", "def test_overcloud_host_upgrade(self, mock_run): command = commands.OvercloudHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\")", "= [ mock.call( mock.ANY, [ \"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={", "\"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump): command =", "the License. You may obtain # a copy of the", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "use this file except in compliance with the License. 
You", "\"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ip-routing.yml\", \"ansible/snat.yml\", \"ansible/disable-glean.yml\",", "{ \"controller0\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", }", "self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={\"ansible_user\": \"stack\"},", "= command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, \"bootstrap-servers\",", "\"run_kayobe_playbooks\") def test_seed_container_image_build(self, mock_run): command = commands.SeedContainerImageBuild(TestApp(), []) parser =", "class TestApp(cliff.app.App): def __init__(self): super(TestApp, self).__init__( description='Test app', version='0.1', command_manager=cliff.commandmanager.CommandManager('kayobe.cli'))", "parsed_args = parser.parse_args([]) mock_dump.return_value = { \"seed\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\",", "self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build_with_regex(self, mock_run): command = commands.SeedContainerImageBuild(TestApp(),", "extra_vars={ \"ansible_python_interpreter\": \"/usr/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", } ), ]", "def test_baremetal_compute_manage(self, mock_run): command = commands.BaremetalComputeManage(TestApp(), []) parser = command.get_parser(\"test\")", "command = commands.BaremetalComputeProvide(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") 
@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kolla_venv(self, mock_kolla_run, mock_run,", "\"ansible/lvm.yml\", ], limit=\"overcloud\", ), mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call(", "), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\")", "expected_calls = [ mock.call( mock.ANY, [ \"ansible/ip-allocation.yml\", \"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\",", "commands.OvercloudPostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result =", "\"run_kayobe_playbooks\") def test_baremetal_compute_provide(self, mock_run): command = commands.BaremetalComputeProvide(TestApp(), []) parser =", "\"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ip-routing.yml\", \"ansible/snat.yml\", \"ansible/disable-glean.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"seed\", ),", "\"controller0\": {\"kayobe_ansible_user\": \"stack\"} } result = command.run(parsed_args) self.assertEqual(0, result) expected_calls", "mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build(self, mock_run): command = commands.OvercloudContainerImageBuild(TestApp(), [])", "\"run_kayobe_playbooks\") def test_seed_hypervisor_host_upgrade(self, mock_run): command = commands.SeedHypervisorHostUpgrade(TestApp(), []) parser =", "= command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, hosts=\"seed\", tags=\"dump-config\")", 
"commands.SeedHypervisorHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value =", "= command.get_parser(\"test\") parsed_args = parser.parse_args([\"--push\", \"^regex1$\", \"^regex2$\"]) result = command.run(parsed_args)", "force=True) expected_calls = [ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"), ]", "mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kolla_venv(self, mock_kolla_run,", "[ \"ansible/baremetal-compute-inspect.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "command = commands.SeedServiceDeploy(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump): command", "mock_dump.return_value = { \"seed\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\":", "mock.ANY, [ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", \"ansible/ceph-block-devices.yml\", ], limit=\"overcloud\", ), ]", "test_overcloud_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(), []) parser =", "= parser.parse_args([\"--push\", \"^regex1$\", \"^regex2$\"]) result = command.run(parsed_args) self.assertEqual(0, result) expected_calls", "mock_run, 
mock_dump): command = commands.OvercloudHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "License is distributed on an \"AS IS\" BASIS, WITHOUT #", "KIND, either express or implied. See the # License for", "command = commands.NetworkConnectivityCheck(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_inspect(self, mock_run):", "}, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_host_upgrade(self, mock_run):", "mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY, [ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\",", "\"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_control_host_bootstrap(self, mock_run, mock_install): command =", "\"License\"); you may # not use this file except in", "mock.ANY, [ \"ansible/ip-allocation.yml\", \"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/network.yml\",", "from kayobe.cli import commands from kayobe import utils class TestApp(cliff.app.App):", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "commands.SeedHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value =", "mock_run, mock_install): command = commands.ControlHostBootstrap(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "@mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def 
test_control_host_upgrade(self, mock_run, mock_install): command", "= commands.NetworkConnectivityCheck(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result", "command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, host=\"seed-hypervisor\", var_name=\"kayobe_ansible_user\", tags=\"dump-config\")", "express or implied. See the # License for the specific", "[ \"ansible/ip-allocation.yml\", \"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\",", "\"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_service_deploy(self, mock_kolla_run, mock_run): command = commands.SeedServiceDeploy(TestApp(),", "result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, hosts=\"overcloud\",", "mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", } ),", "\"run_kolla_ansible_overcloud\") def test_overcloud_host_configure(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(), [])", "limit=\"seed\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build(self, mock_run):", "= [ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "the Apache License, Version 2.0 (the \"License\"); you may #", "\"{{ overcloud_container_image_sets }}\"), \"push_images\": False, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", 
"\"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_sets\": ( \"{{ seed_container_image_sets }}\"),", "(c) 2017 StackHPC Ltd. # # Licensed under the Apache", "2017 StackHPC Ltd. # # Licensed under the Apache License,", "\"virtualenv\": \"/kolla/venv/bin/python\", } ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin,", "'ansible/overcloud-grafana-configure.yml' ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_inspect(self,", "mock.ANY, \"bootstrap-servers\", extra_vars={\"ansible_user\": \"stack\"}, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\")", "See the # License for the specific language governing permissions", "def test_baremetal_compute_inspect(self, mock_run): command = commands.BaremetalComputeInspect(TestApp(), []) parser = command.get_parser(\"test\")", "result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-provide.yml\", ], ),", "\"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump):", "[]) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([\"--push\", \"^regex1$\", \"^regex2$\"]) result", "= commands.OvercloudHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value", "extra_vars={ \"container_image_sets\": ( \"{{ seed_container_image_sets }}\"), \"push_images\": False, } ),", "def 
test_overcloud_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(), []) parser", "= commands.SeedContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result", "mock_install): command = commands.ControlHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "\"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_sets\": ( \"{{ seed_container_image_sets }}\"), \"push_images\": False,", "^regex2$'\", \"push_images\": True, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "\"stack\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "test_seed_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), []) parser =", "{ \"controller0\": { \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } } result", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "test_control_host_upgrade(self, mock_run, mock_install): command = commands.ControlHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\")", "\"ansible/seed-hypervisor-libvirt-host.yml\", ], limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build(self, mock_run): command = commands.SeedContainerImageBuild(TestApp(),", "\"^regex2$\"]) result = command.run(parsed_args) 
self.assertEqual(0, result) expected_calls = [ mock.call(", "\"run_kolla_ansible_seed\") def test_seed_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), [])", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_provide(self, mock_run): command =", "[ mock.call( mock.ANY, [ \"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_regexes\":", "super(TestApp, self).__init__( description='Test app', version='0.1', command_manager=cliff.commandmanager.CommandManager('kayobe.cli')) class TestCase(unittest.TestCase): @mock.patch.object(utils, \"galaxy_install\",", "command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={", "), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_manage(self, mock_run): command", "mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_network_connectivity_check(self,", "} ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build_with_regex(self, mock_run):", "mock.call(mock.ANY, hosts=\"overcloud\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls = [ mock.call(", "cliff.commandmanager import mock from kayobe.cli import commands from kayobe import", "mock_run, mock_install): command = commands.ControlHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") 
parsed_args", "\"ansible/kolla-target-venv.yml\", \"ansible/overcloud-docker-sdk-upgrade.yml\", \"ansible/overcloud-etc-hosts-fixup.yml\", ], limit=\"overcloud\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "\"ansible/ip-allocation.yml\", \"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\",", "self).__init__( description='Test app', version='0.1', command_manager=cliff.commandmanager.CommandManager('kayobe.cli')) class TestCase(unittest.TestCase): @mock.patch.object(utils, \"galaxy_install\", spec=True)", "host=\"seed-hypervisor\", var_name=\"kayobe_ansible_user\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls = [ mock.call(", "mock_run): command = commands.SeedServiceDeploy(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls = [ mock.call( mock.ANY, [ \"ansible/ip-allocation.yml\", \"ansible/ssh-known-host.yml\",", "law or agreed to in writing, software # distributed under", "= commands.SeedContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([\"--push\", \"^regex1$\",", "= { \"controller0\": {\"kayobe_ansible_user\": \"stack\"} } result = command.run(parsed_args) self.assertEqual(0,", "\"ansible/sysctl.yml\", \"ansible/ntp.yml\", \"ansible/seed-hypervisor-libvirt-host.yml\", ], limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "test_overcloud_post_configure(self, mock_run): command = commands.OvercloudPostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "\"ansible/network.yml\", 
\"ansible/sysctl.yml\", \"ansible/ntp.yml\", \"ansible/seed-hypervisor-libvirt-host.yml\", ], limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "limitations # under the License. import unittest import cliff.app import", "\"ansible/ip-routing.yml\", \"ansible/snat.yml\", \"ansible/disable-glean.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"seed\", ), mock.call( mock.ANY,", "command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = { \"controller0\": {\"kayobe_ansible_user\": \"stack\"}", "], limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_upgrade(self,", "command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = { \"seed\": {\"kayobe_ansible_user\": \"stack\"}", "commands.NetworkConnectivityCheck(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result =", "implied. 
See the # License for the specific language governing", "command = commands.ControlHostBootstrap(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "= parser.parse_args([]) mock_dump.return_value = { \"seed\": {\"kayobe_ansible_user\": \"stack\"} } result", "= [ mock.call( mock.ANY, [ 'ansible/overcloud-ipa-images.yml', 'ansible/overcloud-introspection-rules.yml', 'ansible/overcloud-introspection-rules-dell-lldp-workaround.yml', # noqa", "expected_calls = [ mock.call( mock.ANY, [ 'ansible/overcloud-ipa-images.yml', 'ansible/overcloud-introspection-rules.yml', 'ansible/overcloud-introspection-rules-dell-lldp-workaround.yml', #", "\"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "test_service_deploy(self, mock_kolla_run, mock_run): command = commands.SeedServiceDeploy(TestApp(), []) parser = command.get_parser(\"test\")", "\"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", } ),", "expected_calls = [ mock.call(mock.ANY, host=\"seed-hypervisor\", var_name=\"kayobe_ansible_user\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list)", "\"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump): command =", "def test_seed_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), []) parser", "command.run(parsed_args) self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\") expected_calls = [ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]),", "[ mock.call( mock.ANY, [ 
\"ansible/baremetal-compute-inspect.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "result = command.run(parsed_args) self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\") expected_calls = [", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_both_venvs(self, mock_kolla_run, mock_run,", "parsed_args = parser.parse_args([\"--push\", \"^regex1$\", \"^regex2$\"]) result = command.run(parsed_args) self.assertEqual(0, result)", "result) expected_calls = [ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/usr/bin/python\",", "= parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\") expected_calls", "[ mock.call( mock.ANY, [ \"ansible/ip-allocation.yml\", \"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\",", "True, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\")", "mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY, [\"ansible/kolla-bifrost.yml\"], ), mock.call( mock.ANY,", "mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/usr/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\",", "limit=\"overcloud\", ), mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY, [", "\"ansible/kolla-host.yml\", \"ansible/docker.yml\", 
\"ansible/ceph-block-devices.yml\", ], limit=\"overcloud\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls", "\"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(), [])", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_control_host_upgrade(self, mock_run, mock_install): command = commands.ControlHostUpgrade(TestApp(), [])", "= command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = { \"seed\": {", "\"ansible/roles\") expected_calls = [ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"), ]", "\"ansible/seed-introspection-rules.yml\", \"ansible/dell-switch-bmp.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [", "cliff.app import cliff.commandmanager import mock from kayobe.cli import commands from", "spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_control_host_bootstrap(self, mock_run, mock_install): command = commands.ControlHostBootstrap(TestApp(),", "\"run_kayobe_playbooks\") def test_control_host_upgrade(self, mock_run, mock_install): command = commands.ControlHostUpgrade(TestApp(), []) parser", "[ mock.call(mock.ANY, host=\"seed-hypervisor\", var_name=\"kayobe_ansible_user\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls =", "mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_manage(self, mock_run): command = commands.BaremetalComputeManage(TestApp(), [])", "mock_run): command = commands.SeedHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "commands.OvercloudContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") 
parsed_args = parser.parse_args([\"--push\", \"^regex1$\", \"^regex2$\"])", "{ \"controller0\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", } } result", "parsed_args = parser.parse_args([]) mock_dump.return_value = { \"seed\": { \"kayobe_ansible_user\": \"stack\",", "TestApp(cliff.app.App): def __init__(self): super(TestApp, self).__init__( description='Test app', version='0.1', command_manager=cliff.commandmanager.CommandManager('kayobe.cli')) class", "mock.ANY, [ 'ansible/overcloud-ipa-images.yml', 'ansible/overcloud-introspection-rules.yml', 'ansible/overcloud-introspection-rules-dell-lldp-workaround.yml', # noqa 'ansible/provision-net.yml', 'ansible/overcloud-grafana-configure.yml' ],", "command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = { \"controller0\": { \"ansible_python_interpreter\":", "mock.call(mock.ANY, hosts=\"seed\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls = [ mock.call(", "], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_manage(self, mock_run):", "'ansible/overcloud-ipa-images.yml', 'ansible/overcloud-introspection-rules.yml', 'ansible/overcloud-introspection-rules-dell-lldp-workaround.yml', # noqa 'ansible/provision-net.yml', 'ansible/overcloud-grafana-configure.yml' ], ), ]", "= commands.ControlHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result", "expected_calls = [ mock.call(mock.ANY, [\"ansible/network-connectivity.yml\"]), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\")", "mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls,", "[ 
mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/usr/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\":", "= command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"],", "\"run_kayobe_playbooks\") def test_control_host_bootstrap(self, mock_run, mock_install): command = commands.ControlHostBootstrap(TestApp(), []) parser", "= command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = { \"controller0\": {", "import cliff.commandmanager import mock from kayobe.cli import commands from kayobe", "test_seed_container_image_build_with_regex(self, mock_run): command = commands.SeedContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "], limit=\"overcloud\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [ mock.call(", "\"ansible/kolla-host.yml\", \"ansible/docker.yml\", ], limit=\"seed\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls =", "mock_run): command = commands.SeedContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "[ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={\"ansible_user\": \"stack\"}, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list)", "test_seed_host_upgrade(self, mock_run): command = commands.SeedHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_host_upgrade(self, mock_run): command", "\"controller0\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } }", "import commands from kayobe import utils class TestApp(cliff.app.App): def __init__(self):", 
"}}\"), \"push_images\": False, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "@mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_service_deploy(self, mock_kolla_run, mock_run): command = commands.SeedServiceDeploy(TestApp(), [])", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_configure(self, mock_run, mock_dump): command =", "mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_upgrade(self, mock_run): command = commands.SeedHypervisorHostUpgrade(TestApp(), [])", "\"bootstrap-servers\", extra_vars={\"ansible_user\": \"stack\"}, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin,", "\"ansible/sysctl.yml\", \"ansible/disable-glean.yml\", \"ansible/disable-cloud-init.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"overcloud\", ), mock.call( mock.ANY,", "expected_calls = [ mock.call(mock.ANY, hosts=\"overcloud\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls", "\"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_sets\": ( \"{{ seed_container_image_sets }}\"), \"push_images\":", "parser = command.get_parser(\"test\") parsed_args = parser.parse_args([\"--push\", \"^regex1$\", \"^regex2$\"]) result =", "command = commands.OvercloudHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls = [ mock.call( mock.ANY, [", "kayobe.cli import commands 
from kayobe import utils class TestApp(cliff.app.App): def", "parsed_args = parser.parse_args([]) mock_dump.return_value = { \"seed\": {\"kayobe_ansible_user\": \"stack\"} }", "\"run_kolla_ansible_seed\") def test_seed_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), [])", "mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", },", "extra_vars={ \"container_image_sets\": ( \"{{ overcloud_container_image_sets }}\"), \"push_images\": False, } ),", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "mock.ANY, [\"ansible/kolla-bifrost.yml\"], ), mock.call( mock.ANY, [ \"ansible/overcloud-host-image-workaround-resolv.yml\", \"ansible/seed-introspection-rules.yml\", \"ansible/dell-switch-bmp.yml\", ],", "= [ mock.call(mock.ANY, hosts=\"seed\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls =", "\"ansible/overcloud-docker-sdk-upgrade.yml\", \"ansible/overcloud-etc-hosts-fixup.yml\", ], limit=\"overcloud\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "commands.BaremetalComputeManage(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result =", "# # Licensed under the Apache License, Version 2.0 (the", "\"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump):", "mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") 
@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_both_venvs(self, mock_kolla_run,", "'ansible/overcloud-introspection-rules-dell-lldp-workaround.yml', # noqa 'ansible/provision-net.yml', 'ansible/overcloud-grafana-configure.yml' ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "[ \"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_sets\": ( \"{{ overcloud_container_image_sets", "[\"ansible/kolla-ansible.yml\"], tags=\"install\"), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_network_connectivity_check(self, mock_run):", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_network_connectivity_check(self, mock_run): command =", "result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, [\"ansible/network-connectivity.yml\"]),", "\"/kolla/venv/bin/python\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_host_upgrade(self,", "commands.SeedContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result =", "), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\")", "[ mock.call( mock.ANY, [ 'ansible/overcloud-ipa-images.yml', 'ansible/overcloud-introspection-rules.yml', 
'ansible/overcloud-introspection-rules-dell-lldp-workaround.yml', # noqa 'ansible/provision-net.yml',", "command = commands.SeedHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "test_network_connectivity_check(self, mock_run): command = commands.NetworkConnectivityCheck(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_provide(self, mock_run): command = commands.BaremetalComputeProvide(TestApp(), []) parser", "\"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/disable-glean.yml\", \"ansible/disable-cloud-init.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\",", "test_baremetal_compute_inspect(self, mock_run): command = commands.BaremetalComputeInspect(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "\"ansible/dev-tools.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ntp.yml\", \"ansible/seed-hypervisor-libvirt-host.yml\", ], limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls,", "mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", }, ), ]", "test_seed_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), []) parser =", "obtain # a copy of the License at # #", "test_control_host_bootstrap(self, mock_run, mock_install): command = commands.ControlHostBootstrap(TestApp(), []) parser = command.get_parser(\"test\")", "\"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", 
\"ansible/ip-routing.yml\",", "= [ mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed\", ),", "[ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", },", "mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure(self, mock_kolla_run,", "mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kayobe_venv(self, mock_kolla_run,", "result) expected_calls = [ mock.call(mock.ANY, hosts=\"seed\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list)", "], limit=\"seed\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build(self,", "def test_seed_container_image_build_with_regex(self, mock_run): command = commands.SeedContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\")", "\"container_image_regexes\": \"'^regex1$ ^regex2$'\", \"push_images\": True, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "Version 2.0 (the \"License\"); you may # not use this", "}, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin,", "@mock.patch.object(commands.KayobeAnsibleMixin, 
\"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_both_venvs(self, mock_kolla_run, mock_run,", "\"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", }, ),", "= command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [", "\"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\")", "\"ansible/kolla-target-venv.yml\", ], limit=\"seed\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "= command.run(parsed_args) self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\") expected_calls = [ mock.call(mock.ANY,", "Ltd. 
# # Licensed under the Apache License, Version 2.0", "command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-provide.yml\",", "License for the specific language governing permissions and limitations #", "limit=\"overcloud\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build(self, mock_run):", "\"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", ], limit=\"seed\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls", "\"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ntp.yml\", \"ansible/seed-hypervisor-libvirt-host.yml\", ],", "\"ansible/overcloud-etc-hosts-fixup.yml\", ], limit=\"overcloud\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "mock_run): command = commands.SeedHypervisorHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "parsed_args = parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0, result) expected_calls =", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_host_upgrade(self, mock_run): command = commands.SeedHostUpgrade(TestApp(),", "mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed\", ), ] self.assertEqual(expected_calls,", "# under the License. 
import unittest import cliff.app import cliff.commandmanager", "test_overcloud_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(), []) parser =", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "parser.parse_args([]) mock_dump.return_value = { \"seed\": {\"kayobe_ansible_user\": \"stack\"} } result =", "mock_run, mock_dump): command = commands.SeedHypervisorHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={\"ansible_user\":", "\"kayobe_ansible_user\": \"stack\", } } result = command.run(parsed_args) self.assertEqual(0, result) expected_calls", "self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [ mock.call( mock.ANY, \"deploy-bifrost\", ), ]", "mock_run): command = commands.OvercloudHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "[]) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = \"stack\"", "\"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump):", "mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kayobe_venv(self, mock_kolla_run, mock_run,", "\"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } } result = command.run(parsed_args) 
self.assertEqual(0, result) expected_calls", "= [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-inspect.yml\", ], ), ] self.assertEqual(expected_calls,", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_service_deploy(self, mock_kolla_run, mock_run): command =", "\"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", } ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\")", "result) expected_calls = [ mock.call( mock.ANY, [ 'ansible/overcloud-ipa-images.yml', 'ansible/overcloud-introspection-rules.yml', 'ansible/overcloud-introspection-rules-dell-lldp-workaround.yml',", "\"stack\", } ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "expected_calls = [ mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY,", "under the License. 
import unittest import cliff.app import cliff.commandmanager import", "\"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } } result = command.run(parsed_args) self.assertEqual(0, result)", "mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin,", "( \"{{ seed_container_image_sets }}\"), \"push_images\": False, } ), ] self.assertEqual(expected_calls,", "\"ansible/baremetal-compute-manage.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_provide(self,", "\"stack\"}, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin,", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "= commands.SeedHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value", "mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(utils, \"galaxy_install\",", "= { \"seed\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", } }", "mock_run): command = commands.OvercloudContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "'ansible/provision-net.yml', 'ansible/overcloud-grafana-configure.yml' ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) 
@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "command = commands.OvercloudPostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "= parser.parse_args([]) mock_dump.return_value = { \"controller0\": { \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\":", "[ mock.call( mock.ANY, [ \"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_sets\":", "{\"kayobe_ansible_user\": \"stack\"} } result = command.run(parsed_args) self.assertEqual(0, result) expected_calls =", "{ \"seed\": {\"kayobe_ansible_user\": \"stack\"} } result = command.run(parsed_args) self.assertEqual(0, result)", "[ mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed\", ), ]", "def test_seed_hypervisor_host_configure(self, mock_run, mock_dump): command = commands.SeedHypervisorHostConfigure(TestApp(), []) parser =", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build(self, mock_run): command =", "], extra_vars={ \"container_image_sets\": ( \"{{ overcloud_container_image_sets }}\"), \"push_images\": False, }", "\"stack\" result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY,", "mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\", force=True) expected_calls = [ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"],", "extra_vars={ \"ansible_python_interpreter\": \"/usr/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", }, ), ]", "def test_overcloud_host_configure(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(), []) parser", 
"parser.parse_args([]) mock_dump.return_value = \"stack\" result = command.run(parsed_args) self.assertEqual(0, result) expected_calls", "compliance with the License. You may obtain # a copy", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build(self, mock_run): command = commands.OvercloudContainerImageBuild(TestApp(), []) parser", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_post_configure(self, mock_run): command = commands.OvercloudPostConfigure(TestApp(), []) parser", "result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-manage.yml\", ], ),", "\"container_image_sets\": ( \"{{ overcloud_container_image_sets }}\"), \"push_images\": False, } ), ]", "\"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", } } result = command.run(parsed_args) self.assertEqual(0,", "], limit=\"seed\", ), mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY,", "mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", }, ),", "= [ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/usr/bin/python\", \"ansible_user\": \"stack\",", "command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = { \"seed\": { \"ansible_python_interpreter\":", "tags=\"install\"), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "[\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY, [\"ansible/kolla-bifrost.yml\"], ), mock.call( mock.ANY, [", "tags=\"install\"), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, 
\"run_kayobe_playbooks\") def test_network_connectivity_check(self, mock_run): command", "commands.SeedHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result =", "= { \"controller0\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\",", "\"/kolla/venv/bin/python\", } } result = command.run(parsed_args) self.assertEqual(0, result) expected_calls =", "command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\",", "mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(), []) parser = command.get_parser(\"test\")", "mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kayobe_venv(self, mock_kolla_run,", "), mock.call( mock.ANY, [\"ansible/kolla-bifrost.yml\"], ), mock.call( mock.ANY, [ \"ansible/overcloud-host-image-workaround-resolv.yml\", \"ansible/seed-introspection-rules.yml\",", "self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_configure(self, mock_run, mock_dump):", "\"ansible/disable-glean.yml\", \"ansible/disable-cloud-init.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"overcloud\", ), mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"],", "parser.parse_args([]) mock_dump.return_value = { \"seed\": { \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\",", "\"run_kolla_ansible_overcloud\") def 
test_overcloud_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(), [])", "\"ansible/overcloud-host-image-workaround-resolv.yml\", \"ansible/seed-introspection-rules.yml\", \"ansible/dell-switch-bmp.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls =", "description='Test app', version='0.1', command_manager=cliff.commandmanager.CommandManager('kayobe.cli')) class TestCase(unittest.TestCase): @mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin,", "the # License for the specific language governing permissions and", "self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, hosts=\"seed\", tags=\"dump-config\") ] self.assertEqual(expected_calls,", "self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kolla_venv(self,", "= parser.parse_args([]) mock_dump.return_value = { \"seed\": { \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\":", "command.get_parser(\"test\") parsed_args = parser.parse_args([\"--push\", \"^regex1$\", \"^regex2$\"]) result = command.run(parsed_args) self.assertEqual(0,", "# # Unless required by applicable law or agreed to", "parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(", "= command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = { \"seed\": {\"kayobe_ansible_user\":", "parsed_args = parser.parse_args([]) mock_dump.return_value = \"stack\" result = command.run(parsed_args) self.assertEqual(0,", "mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/usr/bin/python\", 
\"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", },", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump): command", "def test_overcloud_container_image_build(self, mock_run): command = commands.OvercloudContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\")", "result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", \"ansible/overcloud-docker-sdk-upgrade.yml\",", "), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_post_configure(self, mock_run): command", "mock_run.call_args_list) @mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_control_host_upgrade(self, mock_run, mock_install):", "), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_host_upgrade(self, mock_run): command", "= command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, hosts=\"overcloud\", tags=\"dump-config\")", "[ \"ansible/baremetal-compute-manage.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump): command", "parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0, result) 
mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\", force=True) expected_calls", "mock_dump): command = commands.SeedHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "[ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "[ mock.call(mock.ANY, hosts=\"overcloud\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls = [", "spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_control_host_upgrade(self, mock_run, mock_install): command = commands.ControlHostUpgrade(TestApp(),", "commands.ControlHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result =", "= parser.parse_args([]) mock_dump.return_value = { \"controller0\": {\"kayobe_ansible_user\": \"stack\"} } result", "= commands.OvercloudContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result", "kayobe import utils class TestApp(cliff.app.App): def __init__(self): super(TestApp, self).__init__( description='Test", "[ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", ], limit=\"seed\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_post_configure(self, mock_run): command =", "\"controller0\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", } } result =", 
"mock_dump.return_value = { \"seed\": { \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", }", "[]) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = {", "permissions and limitations # under the License. import unittest import", "def test_seed_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), []) parser", "def test_overcloud_container_image_build_with_regex(self, mock_run): command = commands.OvercloudContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\")", "# noqa 'ansible/provision-net.yml', 'ansible/overcloud-grafana-configure.yml' ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "= [ mock.call(mock.ANY, hosts=\"overcloud\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls =", "mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_service_deploy(self, mock_kolla_run, mock_run): command", "2.0 (the \"License\"); you may # not use this file", "\"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", } ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "import mock from kayobe.cli import commands from kayobe import utils", "mock.call( mock.ANY, [ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", ], limit=\"seed\", ), ]", "\"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"overcloud\", ), mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ),", "= { \"seed\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": 
\"/kolla/venv/bin/python\",", "from kayobe import utils class TestApp(cliff.app.App): def __init__(self): super(TestApp, self).__init__(", "], limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_network_connectivity_check(self, mock_run): command = commands.NetworkConnectivityCheck(TestApp(), [])", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build_with_regex(self, mock_run): command = commands.SeedContainerImageBuild(TestApp(), []) parser", "command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = { \"seed\": { \"kayobe_ansible_user\":", "\"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump): command =", "test_overcloud_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(), []) parser =", "limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin,", "\"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump): command =", "mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", \"ansible/overcloud-docker-sdk-upgrade.yml\", \"ansible/overcloud-etc-hosts-fixup.yml\", ], limit=\"overcloud\", ), ]", 
"\"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ip-routing.yml\", \"ansible/snat.yml\", \"ansible/disable-glean.yml\", \"ansible/ntp.yml\",", "\"run_kolla_ansible_seed\") def test_seed_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), [])", "mock_run): command = commands.BaremetalComputeManage(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, hosts=\"seed\",", "by applicable law or agreed to in writing, software #", "parser.parse_args([]) mock_dump.return_value = { \"controller0\": {\"kayobe_ansible_user\": \"stack\"} } result =", "command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [ 'ansible/overcloud-ipa-images.yml',", "= [ mock.call( mock.ANY, \"deploy-bifrost\", ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "tags=\"config\", ), mock.call( mock.ANY, [ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", \"ansible/ceph-block-devices.yml\", ],", "mock.call( mock.ANY, [ \"ansible/overcloud-host-image-workaround-resolv.yml\", \"ansible/seed-introspection-rules.yml\", \"ansible/dell-switch-bmp.yml\", ], ), ] self.assertEqual(expected_calls,", "mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", \"ansible/overcloud-docker-sdk-upgrade.yml\", \"ansible/overcloud-etc-hosts-fixup.yml\", ], limit=\"overcloud\", ),", "= command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = { \"controller0\": {\"kayobe_ansible_user\":", "limit=\"seed\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [ mock.call( mock.ANY,", "] 
self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_host_upgrade(self, mock_run): command =", "= { \"controller0\": { \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } }", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "[ mock.call(mock.ANY, hosts=\"seed\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls = [", "\"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build_with_regex(self, mock_run): command", "test_baremetal_compute_provide(self, mock_run): command = commands.BaremetalComputeProvide(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "commands.SeedHypervisorHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result =", "\"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/disable-glean.yml\", \"ansible/disable-cloud-init.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"overcloud\", ),", "mock.ANY, [ \"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_sets\": ( \"{{", "parsed_args = parser.parse_args([]) mock_dump.return_value = { \"controller0\": {\"kayobe_ansible_user\": \"stack\"} }", "\"ansible/snat.yml\", \"ansible/disable-glean.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"seed\", ), mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"],", "\"ansible/docker.yml\", ], 
limit=\"seed\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [", "= commands.OvercloudHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result", "test_overcloud_container_image_build(self, mock_run): command = commands.OvercloudContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "def test_network_connectivity_check(self, mock_run): command = commands.NetworkConnectivityCheck(TestApp(), []) parser = command.get_parser(\"test\")", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump): command", "[ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "= command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, host=\"seed-hypervisor\", var_name=\"kayobe_ansible_user\",", "and limitations # under the License. 
import unittest import cliff.app", "[ \"ansible/ip-allocation.yml\", \"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\",", "result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\", force=True) expected_calls = [ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY,", "\"push_images\": True, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "} ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build_with_regex(self, mock_run):", "), mock.call( mock.ANY, [ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", ], limit=\"seed\", ),", "= parser.parse_args([]) mock_dump.return_value = { \"seed\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\":", "[ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\":", "@mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(),", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_control_host_bootstrap(self, mock_run, mock_install): command = commands.ControlHostBootstrap(TestApp(), [])", "self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ),", "\"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", } ), ] self.assertEqual(expected_calls, 
mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "\"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/disable-glean.yml\", \"ansible/disable-cloud-init.yml\", \"ansible/ntp.yml\",", "import unittest import cliff.app import cliff.commandmanager import mock from kayobe.cli", "\"ansible/sysctl.yml\", \"ansible/ip-routing.yml\", \"ansible/snat.yml\", \"ansible/disable-glean.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"seed\", ), mock.call(", "} ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def", "parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\") expected_calls =", "expected_calls = [ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/usr/bin/python\", \"ansible_user\":", "), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\")", "mock.ANY, [ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", ], limit=\"seed\", ), ] self.assertEqual(expected_calls,", "may obtain # a copy of the License at #", "\"seed\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", } } result =", "License. 
import unittest import cliff.app import cliff.commandmanager import mock from", "parsed_args = parser.parse_args([]) mock_dump.return_value = { \"controller0\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\",", "def __init__(self): super(TestApp, self).__init__( description='Test app', version='0.1', command_manager=cliff.commandmanager.CommandManager('kayobe.cli')) class TestCase(unittest.TestCase):", "@mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(),", "Unless required by applicable law or agreed to in writing,", "] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [ mock.call( mock.ANY, \"deploy-bifrost\", ),", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure(self, mock_kolla_run, mock_run, mock_dump): command", "var_name=\"kayobe_ansible_user\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls = [ mock.call( mock.ANY,", "mock.ANY, [ \"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_regexes\": \"'^regex1$ ^regex2$'\",", "( \"{{ overcloud_container_image_sets }}\"), \"push_images\": False, } ), ] self.assertEqual(expected_calls,", "\"ansible/ip-allocation.yml\", \"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ntp.yml\",", "commands.BaremetalComputeInspect(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result =", "= { \"seed\": { \"kayobe_ansible_user\": 
\"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } }", "tags=\"config\", ), mock.call( mock.ANY, [\"ansible/kolla-bifrost.yml\"], ), mock.call( mock.ANY, [ \"ansible/overcloud-host-image-workaround-resolv.yml\",", "commands.ControlHostBootstrap(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result =", "= [ mock.call(mock.ANY, [\"ansible/network-connectivity.yml\"]), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin,", "\"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } } result =", "\"ansible_python_interpreter\": \"/usr/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", }, ), ] self.assertEqual(expected_calls,", "\"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } } result = command.run(parsed_args) self.assertEqual(0,", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump): command", "\"^regex1$\", \"^regex2$\"]) result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [", "commands.OvercloudHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value =", "\"push_images\": False, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "\"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_sets\": ( \"{{ overcloud_container_image_sets }}\"),", 
"mock_run): command = commands.BaremetalComputeProvide(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY, [\"ansible/kolla-bifrost.yml\"], ), mock.call(", "test_seed_hypervisor_host_upgrade(self, mock_run): command = commands.SeedHypervisorHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "\"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(), [])", "applicable law or agreed to in writing, software # distributed", "mock.ANY, [ \"ansible/baremetal-compute-inspect.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "limit=\"overcloud\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [ mock.call( mock.ANY,", "\"seed\": { \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } } result =", "], limit=\"overcloud\", ), mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY,", "), mock.call( mock.ANY, [ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", \"ansible/ceph-block-devices.yml\", ], limit=\"overcloud\",", "mock.ANY, [ \"ansible/baremetal-compute-manage.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "command.get_parser(\"test\") parsed_args = parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\",", "OF ANY KIND, either express or implied. 
See the #", "command.get_parser(\"test\") parsed_args = parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0, result) expected_calls", "\"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_configure(self, mock_run, mock_dump): command = commands.SeedHypervisorHostConfigure(TestApp(),", "@mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_control_host_bootstrap(self, mock_run, mock_install): command", "self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kayobe_venv(self,", "], extra_vars={ \"container_image_sets\": ( \"{{ seed_container_image_sets }}\"), \"push_images\": False, }", "\"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"seed\", ), mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ),", "\"run_kolla_ansible_seed\") def test_service_deploy(self, mock_kolla_run, mock_run): command = commands.SeedServiceDeploy(TestApp(), []) parser", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "in writing, software # distributed under the License is distributed", "] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def", "def test_seed_host_configure(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), []) parser", "parser.parse_args([]) mock_dump.return_value = { \"controller0\": { \"kayobe_ansible_user\": 
\"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\",", "mock_kolla_run, mock_run): command = commands.SeedServiceDeploy(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "\"stack\"} } result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [", "parsed_args = parser.parse_args([]) mock_dump.return_value = { \"controller0\": { \"kayobe_ansible_user\": \"stack\",", "mock_dump.return_value = { \"controller0\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\":", "limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_hypervisor_host_upgrade(self, mock_run):", "command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = \"stack\" result = command.run(parsed_args)", "result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY,", "mock_run): command = commands.OvercloudPostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "mock_dump): command = commands.SeedHypervisorHostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args =", "command = commands.BaremetalComputeInspect(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "= parser.parse_args([]) mock_dump.return_value = { \"controller0\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\":", "= parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\", force=True)", "result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\"", "= command.get_parser(\"test\") parsed_args 
= parser.parse_args([]) mock_dump.return_value = \"stack\" result =", "\"ansible/disable-cloud-init.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\", ], limit=\"overcloud\", ), mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\",", "@mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(),", "command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-manage.yml\",", "parser.parse_args([]) mock_dump.return_value = { \"seed\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\",", "self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_control_host_upgrade(self, mock_run,", "parser.parse_args([\"--push\", \"^regex1$\", \"^regex2$\"]) result = command.run(parsed_args) self.assertEqual(0, result) expected_calls =", "parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = { \"controller0\":", "= commands.SeedHypervisorHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result", "command = commands.OvercloudHostUpgrade(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "\"run_kayobe_playbooks\") def test_overcloud_container_image_build_with_regex(self, mock_run): command = commands.OvercloudContainerImageBuild(TestApp(), []) parser =", "{ \"seed\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", }", "extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": 
\"/kolla/venv/bin/python\", } ), ]", "mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={\"ansible_user\": \"stack\"}, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kolla_venv(self, mock_kolla_run,", "either express or implied. See the # License for the", "result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\") expected_calls = [ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"],", "\"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure(self, mock_kolla_run, mock_run, mock_dump):", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump): command", "[ \"ansible/overcloud-host-image-workaround-resolv.yml\", \"ansible/seed-introspection-rules.yml\", \"ansible/dell-switch-bmp.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls", "\"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ntp.yml\", \"ansible/seed-hypervisor-libvirt-host.yml\",", "= [ mock.call( mock.ANY, [ \"ansible/ip-allocation.yml\", \"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\", 
\"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\",", "@mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(),", "self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure(self,", "[\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "\"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_sets\": ( \"{{ overcloud_container_image_sets }}\"), \"push_images\":", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "\"ansible_python_interpreter\": \"/usr/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", } ), ] self.assertEqual(expected_calls,", "= commands.ControlHostBootstrap(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build(self, mock_run): command =", "\"ansible/baremetal-compute-inspect.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_manage(self,", "may # not use this file except in compliance with", "{ \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } } result = 
command.run(parsed_args)", "{ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", } } result = command.run(parsed_args)", "parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) mock_dump.return_value = \"stack\" result", "\"container_image_sets\": ( \"{{ seed_container_image_sets }}\"), \"push_images\": False, } ), ]", "# License for the specific language governing permissions and limitations", "with the License. You may obtain # a copy of", "command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, hosts=\"seed\", tags=\"dump-config\") ]", "= [ mock.call(mock.ANY, host=\"seed-hypervisor\", var_name=\"kayobe_ansible_user\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls", "you may # not use this file except in compliance", "result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ],", "\"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", }, ), ] self.assertEqual(expected_calls,", "\"ansible/kolla-target-venv.yml\", ], limit=\"seed-hypervisor\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin,", "\"ansible_user\": \"stack\", } ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin,", "self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [ 'ansible/overcloud-ipa-images.yml', 'ansible/overcloud-introspection-rules.yml',", "= commands.BaremetalComputeInspect(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result", "mock_kolla_run.call_args_list) 
@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_host_upgrade(self, mock_run): command = commands.OvercloudHostUpgrade(TestApp(), [])", "mock.call( mock.ANY, [ 'ansible/overcloud-ipa-images.yml', 'ansible/overcloud-introspection-rules.yml', 'ansible/overcloud-introspection-rules-dell-lldp-workaround.yml', # noqa 'ansible/provision-net.yml', 'ansible/overcloud-grafana-configure.yml'", "expected_calls = [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-manage.yml\", ], ), ]", "extra_vars={\"ansible_user\": \"stack\"}, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "\"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_sets\": ( \"{{ overcloud_container_image_sets }}\"), \"push_images\": False,", "= \"stack\" result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [", "[ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", }", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "version='0.1', command_manager=cliff.commandmanager.CommandManager('kayobe.cli')) class TestCase(unittest.TestCase): @mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def", "), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build(self, mock_run): command", "\"ansible/dell-switch-bmp.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [ mock.call(", "} ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, 
\"run_kayobe_playbooks\") def test_overcloud_post_configure(self, mock_run):", "@mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(),", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "], limit=\"seed\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [ mock.call(", "] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(utils, \"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_control_host_upgrade(self,", "mock_dump.return_value = { \"controller0\": {\"kayobe_ansible_user\": \"stack\"} } result = command.run(parsed_args)", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "\"/usr/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list)", "= [ mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY, [\"ansible/kolla-bifrost.yml\"],", "\"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/usr/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", }, ),", "mock.call( mock.ANY, \"deploy-bifrost\", ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin,", "\"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump):", "extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": 
\"/kolla/venv/bin/python\", }, ), ]", "self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\",", "= commands.BaremetalComputeManage(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result", "mock_run.call_args_list) expected_calls = [ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={\"ansible_user\": \"stack\"}, ),", "test_baremetal_compute_manage(self, mock_run): command = commands.BaremetalComputeManage(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "= parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [", "\"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", } ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "expected_calls = [ mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", \"ansible/overcloud-docker-sdk-upgrade.yml\", \"ansible/overcloud-etc-hosts-fixup.yml\",", "= commands.BaremetalComputeProvide(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_baremetal_compute_inspect(self, mock_run): command = commands.BaremetalComputeInspect(TestApp(), []) parser", "= { \"controller0\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", } }", "\"run_kolla_ansible_seed\") def test_seed_host_configure(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), [])", "for the specific language governing permissions and limitations # under", "[]) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result = 
command.run(parsed_args)", "expected_calls = [ mock.call(mock.ANY, [\"ansible/bootstrap.yml\"]), mock.call(mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"install\"), ] self.assertEqual(expected_calls,", "def test_control_host_upgrade(self, mock_run, mock_install): command = commands.ControlHostUpgrade(TestApp(), []) parser =", "\"ansible/ceph-block-devices.yml\", ], limit=\"overcloud\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [", "[\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY, [ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\", \"ansible/docker.yml\", \"ansible/ceph-block-devices.yml\",", "= commands.OvercloudContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([\"--push\", \"^regex1$\",", "mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_post_configure(self, mock_run): command = commands.OvercloudPostConfigure(TestApp(), [])", "\"'^regex1$ ^regex2$'\", \"push_images\": True, } ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "\"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin,", "parsed_args = parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0, result) mock_install.assert_called_once_with(\"requirements.yml\", \"ansible/roles\",", "parser.parse_args([]) result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY,", "mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(), []) parser = command.get_parser(\"test\")", "self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") 
@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure(self,", "self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_network_connectivity_check(self, mock_run): command = commands.NetworkConnectivityCheck(TestApp(),", "), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build_with_regex(self, mock_run): command", "expected_calls = [ mock.call(mock.ANY, hosts=\"seed\", tags=\"dump-config\") ] self.assertEqual(expected_calls, mock_dump.call_args_list) expected_calls", "\"run_kayobe_playbooks\") def test_baremetal_compute_manage(self, mock_run): command = commands.BaremetalComputeManage(TestApp(), []) parser =", "\"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_regexes\": \"'^regex1$ ^regex2$'\", \"push_images\": True, } ),", "except in compliance with the License. 
You may obtain #", "= [ mock.call( mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\",", "commands.OvercloudContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result =", "@mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.SeedHostConfigure(TestApp(),", "command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, [\"ansible/network-connectivity.yml\"]), ] self.assertEqual(expected_calls,", "\"/kolla/venv/bin/python\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "mock.call( mock.ANY, [ \"ansible/container-image-builders-check.yml\", \"ansible/kolla-build.yml\", \"ansible/container-image-build.yml\" ], extra_vars={ \"container_image_sets\": (", "'ansible/overcloud-introspection-rules.yml', 'ansible/overcloud-introspection-rules-dell-lldp-workaround.yml', # noqa 'ansible/provision-net.yml', 'ansible/overcloud-grafana-configure.yml' ], ), ] self.assertEqual(expected_calls,", "command = commands.BaremetalComputeManage(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([])", "command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/baremetal-compute-inspect.yml\",", "), mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY, [ \"ansible/kolla-target-venv.yml\",", "language governing permissions and limitations # under the License. import", "License. 
You may obtain # a copy of the License", "\"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\", \"ansible/network.yml\", \"ansible/sysctl.yml\", \"ansible/ip-routing.yml\", \"ansible/snat.yml\", \"ansible/disable-glean.yml\", \"ansible/ntp.yml\", \"ansible/lvm.yml\", ],", "{ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } } result", "<filename>kayobe/tests/unit/cli/test_commands.py # Copyright (c) 2017 StackHPC Ltd. # # Licensed", "mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_seed_container_image_build_with_regex(self, mock_run): command = commands.SeedContainerImageBuild(TestApp(), [])", "ANY KIND, either express or implied. See the # License", "# distributed under the License is distributed on an \"AS", "def test_seed_container_image_build(self, mock_run): command = commands.SeedContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\")", "mock_dump.return_value = { \"seed\": { \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", }", "test_seed_container_image_build(self, mock_run): command = commands.SeedContainerImageBuild(TestApp(), []) parser = command.get_parser(\"test\") parsed_args", "\"run_kayobe_playbooks\") def test_overcloud_post_configure(self, mock_run): command = commands.OvercloudPostConfigure(TestApp(), []) parser =", "# Unless required by applicable law or agreed to in", "[ mock.call(mock.ANY, [\"ansible/network-connectivity.yml\"]), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\")", "\"/kayobe/venv/bin/python\", \"kayobe_ansible_user\": \"stack\", \"kolla_ansible_target_venv\": \"/kolla/venv/bin/python\", } } result = command.run(parsed_args)", 
"@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_host_upgrade(self, mock_run): command = commands.OvercloudHostUpgrade(TestApp(), []) parser", "\"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure(self, mock_kolla_run, mock_run, mock_dump): command =", "expected_calls = [ mock.call( mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed\",", "mock_run.call_args_list) expected_calls = [ mock.call( mock.ANY, \"deploy-bifrost\", ), ] self.assertEqual(expected_calls,", "], limit=\"overcloud\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build(self,", "limit=\"seed\", ), mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY, [", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES", "\"deploy-bifrost\", ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\") @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin,", "def test_overcloud_post_configure(self, mock_run): command = commands.OvercloudPostConfigure(TestApp(), []) parser = command.get_parser(\"test\")", "import cliff.app import cliff.commandmanager import mock from kayobe.cli import commands", "mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\", ), mock.call( mock.ANY, [ \"ansible/kolla-target-venv.yml\", \"ansible/kolla-host.yml\",", "\"run_kayobe_playbooks\") def test_overcloud_container_image_build(self, mock_run): command = commands.OvercloudContainerImageBuild(TestApp(), []) parser =", "@mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def 
test_overcloud_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(),", "[\"ansible/kolla-bifrost.yml\"], ), mock.call( mock.ANY, [ \"ansible/overcloud-host-image-workaround-resolv.yml\", \"ansible/seed-introspection-rules.yml\", \"ansible/dell-switch-bmp.yml\", ], ),", "extra_vars={ \"container_image_regexes\": \"'^regex1$ ^regex2$'\", \"push_images\": True, } ), ] self.assertEqual(expected_calls,", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_seed\") def test_seed_host_configure(self, mock_kolla_run, mock_run, mock_dump): command", "\"run_kayobe_playbooks\") @mock.patch.object(commands.KollaAnsibleMixin, \"run_kolla_ansible_overcloud\") def test_overcloud_host_configure_kolla_venv(self, mock_kolla_run, mock_run, mock_dump): command =", "] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_host_upgrade(self, mock_run): command =", "@mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_overcloud_container_image_build_with_regex(self, mock_run): command = commands.OvercloudContainerImageBuild(TestApp(), []) parser", "[ mock.call( mock.ANY, [ \"ansible/baremetal-compute-provide.yml\", ], ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "\"galaxy_install\", spec=True) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_playbooks\") def test_control_host_upgrade(self, mock_run, mock_install): command =", "command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call( mock.ANY, [ \"ansible/container-image-builders-check.yml\",", "= commands.OvercloudPostConfigure(TestApp(), []) parser = command.get_parser(\"test\") parsed_args = parser.parse_args([]) result", "command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ 
mock.call( mock.ANY, [\"ansible/kolla-ansible.yml\"], tags=\"config\",", "mock.ANY, \"bootstrap-servers\", extra_vars={ \"ansible_python_interpreter\": \"/usr/bin/python\", \"ansible_user\": \"stack\", \"virtualenv\": \"/kolla/venv/bin/python\", }", "mock.ANY, [ \"ansible/kayobe-target-venv.yml\", \"ansible/kolla-target-venv.yml\", ], limit=\"seed\", ), ] self.assertEqual(expected_calls, mock_run.call_args_list)", "\"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", }, ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list) @mock.patch.object(commands.KayobeAnsibleMixin, \"run_kayobe_config_dump\")", "extra_vars={ \"ansible_python_interpreter\": \"/kayobe/venv/bin/python\", \"ansible_user\": \"stack\", } ), ] self.assertEqual(expected_calls, mock_kolla_run.call_args_list)", "result = command.run(parsed_args) self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, host=\"seed-hypervisor\",", "mock.ANY, [ \"ansible/ip-allocation.yml\", \"ansible/ssh-known-host.yml\", \"ansible/kayobe-ansible-user.yml\", \"ansible/kayobe-target-venv.yml\", \"ansible/users.yml\", \"ansible/yum.yml\", \"ansible/dev-tools.yml\", \"ansible/disable-selinux.yml\",", "self.assertEqual(0, result) expected_calls = [ mock.call(mock.ANY, host=\"seed-hypervisor\", var_name=\"kayobe_ansible_user\", tags=\"dump-config\") ]", "\"run_kayobe_playbooks\") def test_baremetal_compute_inspect(self, mock_run): command = commands.BaremetalComputeInspect(TestApp(), []) parser =", "= parser.parse_args([]) mock_dump.return_value = \"stack\" result = command.run(parsed_args) self.assertEqual(0, result)", "], ), ] self.assertEqual(expected_calls, mock_run.call_args_list) expected_calls = [ mock.call( mock.ANY,", "def test_overcloud_host_configure_both_venvs(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(), []) parser", "or implied. 
See the # License for the specific language", "def test_service_deploy(self, mock_kolla_run, mock_run): command = commands.SeedServiceDeploy(TestApp(), []) parser =", "def test_overcloud_host_configure_kayobe_venv(self, mock_kolla_run, mock_run, mock_dump): command = commands.OvercloudHostConfigure(TestApp(), []) parser" ]
[ "StrictVersion('2.0.0') def normalize_tensor_shape(tensor_shape): if is_tf2: return [d for d in", "or subclassed model.\"\"\" return (layer.__module__.find('keras.engine') == -1 and layer.__module__.find('keras.layers') ==", "\"\"\"Returns True if the object is a subclassed layer or", "the MIT License. See License.txt in the project root for", "distutils.version import StrictVersion is_tf2 = StrictVersion(_tf.__version__.split('-')[0]) >= StrictVersion('2.0.0') def normalize_tensor_shape(tensor_shape):", "from distutils.version import StrictVersion is_tf2 = StrictVersion(_tf.__version__.split('-')[0]) >= StrictVersion('2.0.0') def", "tensor_shape] def dump_graph_into_tensorboard(tf_graph): # type: (_tf.Graph) -> None _tb_log_dir =", "def is_subclassed(layer): \"\"\"Returns True if the object is a subclassed", "in tensor_shape] else: return [d.value for d in tensor_shape] def", "os import tensorflow as _tf from distutils.version import StrictVersion is_tf2", "_tf from distutils.version import StrictVersion is_tf2 = StrictVersion(_tf.__version__.split('-')[0]) >= StrictVersion('2.0.0')", "and layer.__module__.find('keras.layers') == -1) else: tensorflow = _tf def is_subclassed(layer):", "is_tf2: tensorflow = _tf.compat.v1 def is_subclassed(layer): \"\"\"Returns True if the", "write_graph(tf_graph) else: from tensorflow.python.summary import summary pb_visual_writer = summary.FileWriter(_tb_log_dir) pb_visual_writer.add_graph(tf_graph)", "# license information. 
############################################################################### import os import tensorflow as _tf", "StrictVersion(_tf.__version__.split('-')[0]) >= StrictVersion('2.0.0') def normalize_tensor_shape(tensor_shape): if is_tf2: return [d for", "import summary pb_visual_writer = summary.FileWriter(_tb_log_dir) pb_visual_writer.add_graph(tf_graph) if is_tf2: tensorflow =", "else: from tensorflow.python.summary import summary pb_visual_writer = summary.FileWriter(_tb_log_dir) pb_visual_writer.add_graph(tf_graph) if", "_tf.compat.v1 def is_subclassed(layer): \"\"\"Returns True if the object is a", "_tf.summary.create_file_writer(_tb_log_dir) with pb_visual_writer.as_default(): write_graph(tf_graph) else: from tensorflow.python.summary import summary pb_visual_writer", "return [d for d in tensor_shape] else: return [d.value for", "[d for d in tensor_shape] else: return [d.value for d", "in tensor_shape] def dump_graph_into_tensorboard(tf_graph): # type: (_tf.Graph) -> None _tb_log_dir", "= _tf.summary.create_file_writer(_tb_log_dir) with pb_visual_writer.as_default(): write_graph(tf_graph) else: from tensorflow.python.summary import summary", "# type: (_tf.Graph) -> None _tb_log_dir = os.environ.get('TB_LOG_DIR') if _tb_log_dir:", "return (layer.__module__.find('keras.engine') == -1 and layer.__module__.find('keras.layers') == -1) else: tensorflow", "type: (_tf.Graph) -> None _tb_log_dir = os.environ.get('TB_LOG_DIR') if _tb_log_dir: if", "a subclassed layer or subclassed model.\"\"\" return (layer.__module__.find('keras.engine') == -1", "= _tf.compat.v1 def is_subclassed(layer): \"\"\"Returns True if the object is", "tensorflow.python.ops.summary_ops_v2 import graph as write_graph pb_visual_writer = _tf.summary.create_file_writer(_tb_log_dir) with pb_visual_writer.as_default():", "MIT License. 
See License.txt in the project root for #", "(layer.__module__.find('keras.engine') == -1 and layer.__module__.find('keras.layers') == -1) else: tensorflow =", "def normalize_tensor_shape(tensor_shape): if is_tf2: return [d for d in tensor_shape]", "d in tensor_shape] else: return [d.value for d in tensor_shape]", "is_tf2: from tensorflow.python.ops.summary_ops_v2 import graph as write_graph pb_visual_writer = _tf.summary.create_file_writer(_tb_log_dir)", "graph as write_graph pb_visual_writer = _tf.summary.create_file_writer(_tb_log_dir) with pb_visual_writer.as_default(): write_graph(tf_graph) else:", "(_tf.Graph) -> None _tb_log_dir = os.environ.get('TB_LOG_DIR') if _tb_log_dir: if is_tf2:", "with pb_visual_writer.as_default(): write_graph(tf_graph) else: from tensorflow.python.summary import summary pb_visual_writer =", "if _tb_log_dir: if is_tf2: from tensorflow.python.ops.summary_ops_v2 import graph as write_graph", "== -1 and layer.__module__.find('keras.layers') == -1) else: tensorflow = _tf", "d in tensor_shape] def dump_graph_into_tensorboard(tf_graph): # type: (_tf.Graph) -> None", "See License.txt in the project root for # license information.", "subclassed model.\"\"\" return (layer.__module__.find('keras.engine') == -1 and layer.__module__.find('keras.layers') == -1)", "in the project root for # license information. ############################################################################### import", "os.environ.get('TB_LOG_DIR') if _tb_log_dir: if is_tf2: from tensorflow.python.ops.summary_ops_v2 import graph as", "normalize_tensor_shape(tensor_shape): if is_tf2: return [d for d in tensor_shape] else:", "# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed", "[d.value for d in tensor_shape] def dump_graph_into_tensorboard(tf_graph): # type: (_tf.Graph)", "from tensorflow.python.ops.summary_ops_v2 import graph as write_graph pb_visual_writer = _tf.summary.create_file_writer(_tb_log_dir) with", "license information. 
############################################################################### import os import tensorflow as _tf from", "License.txt in the project root for # license information. ###############################################################################", "= os.environ.get('TB_LOG_DIR') if _tb_log_dir: if is_tf2: from tensorflow.python.ops.summary_ops_v2 import graph", "for d in tensor_shape] else: return [d.value for d in", "tensorflow as _tf from distutils.version import StrictVersion is_tf2 = StrictVersion(_tf.__version__.split('-')[0])", "= summary.FileWriter(_tb_log_dir) pb_visual_writer.add_graph(tf_graph) if is_tf2: tensorflow = _tf.compat.v1 def is_subclassed(layer):", "reserved. # Licensed under the MIT License. See License.txt in", "from tensorflow.python.summary import summary pb_visual_writer = summary.FileWriter(_tb_log_dir) pb_visual_writer.add_graph(tf_graph) if is_tf2:", "_tb_log_dir: if is_tf2: from tensorflow.python.ops.summary_ops_v2 import graph as write_graph pb_visual_writer", "the object is a subclassed layer or subclassed model.\"\"\" return", "dump_graph_into_tensorboard(tf_graph): # type: (_tf.Graph) -> None _tb_log_dir = os.environ.get('TB_LOG_DIR') if", "if is_tf2: tensorflow = _tf.compat.v1 def is_subclassed(layer): \"\"\"Returns True if", "layer.__module__.find('keras.layers') == -1) else: tensorflow = _tf def is_subclassed(layer): return", "is a subclassed layer or subclassed model.\"\"\" return (layer.__module__.find('keras.engine') ==", "Corporation. All rights reserved. # Licensed under the MIT License.", "import os import tensorflow as _tf from distutils.version import StrictVersion", "# Licensed under the MIT License. 
See License.txt in the", "import tensorflow as _tf from distutils.version import StrictVersion is_tf2 =", "is_subclassed(layer): \"\"\"Returns True if the object is a subclassed layer", "pb_visual_writer.add_graph(tf_graph) if is_tf2: tensorflow = _tf.compat.v1 def is_subclassed(layer): \"\"\"Returns True", "pb_visual_writer.as_default(): write_graph(tf_graph) else: from tensorflow.python.summary import summary pb_visual_writer = summary.FileWriter(_tb_log_dir)", "-> None _tb_log_dir = os.environ.get('TB_LOG_DIR') if _tb_log_dir: if is_tf2: from", "tensor_shape] else: return [d.value for d in tensor_shape] def dump_graph_into_tensorboard(tf_graph):", "pb_visual_writer = summary.FileWriter(_tb_log_dir) pb_visual_writer.add_graph(tf_graph) if is_tf2: tensorflow = _tf.compat.v1 def", "for # license information. ############################################################################### import os import tensorflow as", "tensorflow = _tf.compat.v1 def is_subclassed(layer): \"\"\"Returns True if the object", "summary pb_visual_writer = summary.FileWriter(_tb_log_dir) pb_visual_writer.add_graph(tf_graph) if is_tf2: tensorflow = _tf.compat.v1", "the project root for # license information. ############################################################################### import os", "if the object is a subclassed layer or subclassed model.\"\"\"", "subclassed layer or subclassed model.\"\"\" return (layer.__module__.find('keras.engine') == -1 and", "License. See License.txt in the project root for # license", "tensorflow.python.summary import summary pb_visual_writer = summary.FileWriter(_tb_log_dir) pb_visual_writer.add_graph(tf_graph) if is_tf2: tensorflow", "############################################################################### import os import tensorflow as _tf from distutils.version import", "root for # license information. 
############################################################################### import os import tensorflow", "= StrictVersion(_tf.__version__.split('-')[0]) >= StrictVersion('2.0.0') def normalize_tensor_shape(tensor_shape): if is_tf2: return [d", "for d in tensor_shape] def dump_graph_into_tensorboard(tf_graph): # type: (_tf.Graph) ->", "write_graph pb_visual_writer = _tf.summary.create_file_writer(_tb_log_dir) with pb_visual_writer.as_default(): write_graph(tf_graph) else: from tensorflow.python.summary", "import StrictVersion is_tf2 = StrictVersion(_tf.__version__.split('-')[0]) >= StrictVersion('2.0.0') def normalize_tensor_shape(tensor_shape): if", "summary.FileWriter(_tb_log_dir) pb_visual_writer.add_graph(tf_graph) if is_tf2: tensorflow = _tf.compat.v1 def is_subclassed(layer): \"\"\"Returns", "True if the object is a subclassed layer or subclassed", "if is_tf2: from tensorflow.python.ops.summary_ops_v2 import graph as write_graph pb_visual_writer =", "############################################################################### # Copyright (c) Microsoft Corporation. All rights reserved. #", "under the MIT License. See License.txt in the project root", "is_tf2 = StrictVersion(_tf.__version__.split('-')[0]) >= StrictVersion('2.0.0') def normalize_tensor_shape(tensor_shape): if is_tf2: return", "pb_visual_writer = _tf.summary.create_file_writer(_tb_log_dir) with pb_visual_writer.as_default(): write_graph(tf_graph) else: from tensorflow.python.summary import", "(c) Microsoft Corporation. All rights reserved. # Licensed under the", "All rights reserved. # Licensed under the MIT License. See", "if is_tf2: return [d for d in tensor_shape] else: return", "_tb_log_dir = os.environ.get('TB_LOG_DIR') if _tb_log_dir: if is_tf2: from tensorflow.python.ops.summary_ops_v2 import", "as _tf from distutils.version import StrictVersion is_tf2 = StrictVersion(_tf.__version__.split('-')[0]) >=", "Microsoft Corporation. All rights reserved. 
# Licensed under the MIT", "Licensed under the MIT License. See License.txt in the project", "model.\"\"\" return (layer.__module__.find('keras.engine') == -1 and layer.__module__.find('keras.layers') == -1) else:", "rights reserved. # Licensed under the MIT License. See License.txt", "project root for # license information. ############################################################################### import os import", "information. ############################################################################### import os import tensorflow as _tf from distutils.version", "== -1) else: tensorflow = _tf def is_subclassed(layer): return False", "import graph as write_graph pb_visual_writer = _tf.summary.create_file_writer(_tb_log_dir) with pb_visual_writer.as_default(): write_graph(tf_graph)", "StrictVersion is_tf2 = StrictVersion(_tf.__version__.split('-')[0]) >= StrictVersion('2.0.0') def normalize_tensor_shape(tensor_shape): if is_tf2:", "None _tb_log_dir = os.environ.get('TB_LOG_DIR') if _tb_log_dir: if is_tf2: from tensorflow.python.ops.summary_ops_v2", "object is a subclassed layer or subclassed model.\"\"\" return (layer.__module__.find('keras.engine')", "is_tf2: return [d for d in tensor_shape] else: return [d.value", ">= StrictVersion('2.0.0') def normalize_tensor_shape(tensor_shape): if is_tf2: return [d for d", "as write_graph pb_visual_writer = _tf.summary.create_file_writer(_tb_log_dir) with pb_visual_writer.as_default(): write_graph(tf_graph) else: from", "return [d.value for d in tensor_shape] def dump_graph_into_tensorboard(tf_graph): # type:", "else: return [d.value for d in tensor_shape] def dump_graph_into_tensorboard(tf_graph): #", "def dump_graph_into_tensorboard(tf_graph): # type: (_tf.Graph) -> None _tb_log_dir = os.environ.get('TB_LOG_DIR')", "layer or subclassed model.\"\"\" return (layer.__module__.find('keras.engine') == -1 and layer.__module__.find('keras.layers')", "-1 and layer.__module__.find('keras.layers') == -1) else: tensorflow = 
_tf def", "Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under" ]
[ "request_size must be configured\") print(\"Starting the HTTP client\") http_client.Start() http_client_result", "- total duration of all sessions http_client.DurationSet(self.duration) # - how", "byteblower.ByteBlower.InstanceGet() print(\"Connecting to ByteBlower server %s...\" % self.server_address) self.server =", "Port. # Options are 'DHCPv4', 'DHCPv6', 'SLAAC', 'static' # if", "# 'ip': ['192.168.0.2', \"255.255.255.0\", \"192.168.0.1\"], # if staticv6, use [\"ipaddress\",", "connections attempted\", http_client_result.ConnectionsAttemptedGet()) print(\" connections established\", http_client_result.ConnectionsEstablishedGet()) print(\" connections aborted\",", "prefixlength] # 'ip': ['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'], # TCP port range the", "# HTTP Method can be GET or PUT # -", "MultiServer/MultiClient for the ByteBlower Python API. All examples are guaranteed", "port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpDhcpGet() elif ip_config.lower() == \"slaac\": port_l3 =", "# 'http_method': 'PUT', # total duration, in nanoseconds. # This", "print(\"-\" * 10) http_client.Stop() http_server.Stop() print(\"Stopped the HTTP client\") request_status_value", "stand-alone, the run-function must be # called. This approach makes", "Will be used as HTTP server. 'port_1_config': { 'interface': 'trunk-1-13',", "self.http_method = ParseHTTPRequestMethodFromString(http_method_arg) self.duration = kwargs['duration'] self.session_duration = kwargs['session_duration'] self.session_size", "% self.server_address) self.server = byteblower_instance.ServerAdd(self.server_address) # Create the port which", "= byteblower_instance.ServerAdd(self.server_address) # Create the port which will be the", "'mac': '00:bb:01:00:00:02', # IP configuration for the ByteBlower Port. 
#", "print(\" connections established\", http_client_result.ConnectionsEstablishedGet()) print(\" connections aborted\", http_client_result.ConnectionsAbortedGet()) print(\" connections", "above Copyright 2018, Ex<NAME>. \"\"\" # Needed for python2 /", "http_client_result = http_client.ResultGet() for iteration in range(10): time.sleep(1) http_client_result.Refresh() print(\"-\"", "10) http_client.Stop() http_server.Stop() print(\"Stopped the HTTP client\") request_status_value = http_client.StatusGet()", "\"dhcpv6\" # ip': 'dhcpv6', # if SLAAC, use \"slaac\" #", "time configuration = { # Address (IP or FQDN) of", "of the ByteBlower server to use 'server_address': 'byteblower-tp-1300.lab.byteblower.excentis.com', # Configuration", "import ParseHTTPRequestMethodFromString http_method_arg = kwargs['http_method'] self.http_method = ParseHTTPRequestMethodFromString(http_method_arg) self.duration =", "used as HTTP server. 'port_1_config': { 'interface': 'trunk-1-13', 'mac': '00:bb:01:00:00:01',", "= ip_config[0] if not isinstance(config['ip_address'], str): ip = config['ip_address'][0] if", "port_l2 = port.Layer2EthIISet() port_l2.MacSet(config['mac']) ip_config = config['ip'] if not isinstance(ip_config,", "= None def run(self): byteblower_instance = byteblower.ByteBlower.InstanceGet() print(\"Connecting to ByteBlower", "in ip: config['ip_address'] = ip.split('/')[0] print(\"Created port\", port.DescriptionGet()) return port", "server http_server = self.port_1.ProtocolHttpMultiServerAdd() server_tcp_port = self.port_1_config['tcp_port'] if server_tcp_port is", "endpoint http_client.LocalPortRangeSet(self.port_2_config['tcp_port_min'], self.port_2_config['tcp_port_max']) # Configure the direction. 
# If the", "port_l3 = port.Layer3IPv6Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpDhcpGet() elif ip_config.lower() ==", "compatibility from __future__ import print_function # import the ByteBlower module", "None: # let the HTTP Client request a page of", "IP configuration for the ByteBlower Port. # Options are 'DHCPv4',", "are guaranteed to work with Python 2.7 and above Copyright", "/ python3 print function compatibility from __future__ import print_function #", "# IP configuration for the ByteBlower Port. # Options are", "# Static configuration if len(ip_config) == 3: # IPv4 port_l3", "the API from byteblowerll.byteblower import ParseHTTPRequestMethodFromString http_method_arg = kwargs['http_method'] self.http_method", "self.provision_port(self.port_2_config) http_server_ip_address = self.port_1_config['ip_address'] # create a HTTP server http_server", "many connections can be created? http_client.CumulativeConnectionLimitSet(self.max_total_sessions) # - how many", "no limit 'max_total_sessions': 0, # TOS value to use on", "can be GET or PUT # - GET: Standard HTTP", "'PUT', # total duration, in nanoseconds. # This is the", "self.port_1_config['ip_address'] # create a HTTP server http_server = self.port_1.ProtocolHttpMultiServerAdd() server_tcp_port", "HTTP Client request a page of a specific size... http_client.SessionSizeSet(self.session_size)", "for the first ByteBlower port. 
# Will be used as", "[\"ipaddress\", prefixlength] # 'ip': ['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'], # TCP port range", "http_client_result.ConnectionsRefusedGet()) print(\"-\" * 10) http_client.Stop() http_server.Stop() print(\"Stopped the HTTP client\")", "of sessions # No more than this number of sessions", "http_client = self.port_2.ProtocolHttpMultiClientAdd() # - remote endpoint http_client.RemoteAddressSet(http_server_ip_address) http_client.RemotePortSet(server_tcp_port) #", "server http_client.HttpMethodSet(self.http_method) print(\"Server port:\", self.port_1.DescriptionGet()) print(\"Client port:\", self.port_2.DescriptionGet()) # let", "= kwargs['server_address'] self.port_1_config = kwargs['port_1_config'] self.port_2_config = kwargs['port_2_config'] # Helper", "- how many connections can be created? http_client.CumulativeConnectionLimitSet(self.max_total_sessions) # -", "port_l3.GatewaySet(ip_config[2]) config['ip_address'] = port_l3.IpGet() elif len(ip_config) == 2: port_l3 =", "= port.Layer3IPv4Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpGet() elif ip_config.lower() == \"dhcpv6\":", "# IPv6 address = ip_config[0] prefix_length = ip_config[1] ip =", "HTTP Server port\") self.port_1 = self.provision_port(self.port_1_config) print(\"Creating HTTP Client port\")", "if not isinstance(config['ip_address'], str): ip = config['ip_address'][0] if '/' in", "of bytes transmitted by a session 'session_size': 1 * 1000", "from __future__ import print_function # import the ByteBlower module import", "as HTTP client. 'port_2_config': { 'interface': 'trunk-1-25', 'mac': '00:bb:01:00:00:02', #", "{ # Address (IP or FQDN) of the ByteBlower server", "server. 
'port_1_config': { 'interface': 'trunk-1-13', 'mac': '00:bb:01:00:00:01', # IP configuration", "PUT, # traffic will flow from the HTTP client to", "HTTP server listen for requests http_server.Start() # - total duration", "= http_client.StatusGet() request_status_string = byteblower.ConvertHTTPMultiClientStatusToString(request_status_value) http_client_result.Refresh() tx_bytes = http_client_result.TcpTxByteCountGet() tx_speed", "SLAAC, use \"slaac\" # 'ip': 'slaac', # if staticv4, use", "DHCPv6, use \"dhcpv6\" # ip': 'dhcpv6', # if SLAAC, use", "== 2: port_l3 = port.Layer3IPv6Set() # IPv6 address = ip_config[0]", "create a HTTP server http_server = self.port_1.ProtocolHttpMultiServerAdd() server_tcp_port = self.port_1_config['tcp_port']", "server, this will be the port on which the server", "DHCPv4, use \"dhcpv4\" 'ip': 'dhcpv4', # if DHCPv6, use \"dhcpv6\"", "http_client_result.ConnectionsAttemptedGet()) print(\" connections established\", http_client_result.ConnectionsEstablishedGet()) print(\" connections aborted\", http_client_result.ConnectionsAbortedGet()) print(\"", "byteblower_instance = byteblower.ByteBlower.InstanceGet() print(\"Connecting to ByteBlower server %s...\" % self.server_address)", "self.server = byteblower_instance.ServerAdd(self.server_address) # Create the port which will be", "print(\"Server Result data : {}\".format(http_server_result.DescriptionGet())) return [ self.duration, self.session_duration, self.session_size,", "import time configuration = { # Address (IP or FQDN)", "is not None: http_server.PortSet(server_tcp_port) else: server_tcp_port = http_server.PortGet() # create", "duration, in nanoseconds. 
# This is the duration of the", "= kwargs['duration'] self.session_duration = kwargs['session_duration'] self.session_size = kwargs['session_size'] self.max_concurrent_sessions =", "flow from the HTTP client to the HTTP server http_client.HttpMethodSet(self.http_method)", "the HTTP server http_client.HttpMethodSet(self.http_method) print(\"Server port:\", self.port_1.DescriptionGet()) print(\"Client port:\", self.port_2.DescriptionGet())", "configuration if len(ip_config) == 3: # IPv4 port_l3 = port.Layer3IPv4Set()", "in a series of # examples. if __name__ == \"__main__\":", "from the web server # - PUT: Standard HTTP upload,", "Configuration for the second ByteBlower port. # Will be used", "by the HTTP connection. # On the HTTP server, this", "the HTTP server to the HTTP client # If the", "= byteblower.ByteBlower.InstanceGet() print(\"Connecting to ByteBlower server %s...\" % self.server_address) self.server", "len(ip_config) == 3: # IPv4 port_l3 = port.Layer3IPv4Set() port_l3.IpSet(ip_config[0]) port_l3.NetmaskSet(ip_config[1])", "'ip': 'slaac', # if staticv4, use [\"ipaddress\", netmask, gateway] #", "request_status_value = http_client.StatusGet() request_status_string = byteblower.ConvertHTTPMultiClientStatusToString(request_status_value) http_client_result.Refresh() tx_bytes = http_client_result.TcpTxByteCountGet()", "'tos': 0 } class Example: def __init__(self, **kwargs): self.server_address =", "concurrent sessions # Maximum number of sessions that will be", "# if staticv4, use [\"ipaddress\", netmask, gateway] # 'ip': ['192.168.0.2',", "- remote endpoint http_client.RemoteAddressSet(http_server_ip_address) http_client.RemotePortSet(server_tcp_port) # - local endpoint http_client.LocalPortRangeSet(self.port_2_config['tcp_port_min'],", "running at the same time http_client.MaximumConcurrentRequestsSet(self.max_concurrent_sessions) # - individual duration,", "self.port_2.ProtocolHttpMultiClientAdd() # - remote endpoint 
http_client.RemoteAddressSet(http_server_ip_address) http_client.RemotePortSet(server_tcp_port) # - local", "the port which will be the HTTP client (port_2) self.port_2", "a HTTP server http_server = self.port_1.ProtocolHttpMultiServerAdd() server_tcp_port = self.port_1_config['tcp_port'] if", "self.port_2_config = kwargs['port_2_config'] # Helper function, we can use this", "[ self.duration, self.session_duration, self.session_size, self.max_total_sessions, self.max_concurrent_sessions, tx_bytes, rx_bytes, tx_speed, rx_speed,", "\"255.255.255.0\", \"192.168.0.1\"], # if staticv6, use [\"ipaddress\", prefixlength] # 'ip':", "Client http_client = self.port_2.ProtocolHttpMultiClientAdd() # - remote endpoint http_client.RemoteAddressSet(http_server_ip_address) http_client.RemotePortSet(server_tcp_port)", "sessions will be created # 0 means no limit 'max_total_sessions':", "ByteBlower server %s...\" % self.server_address) self.server = byteblower_instance.ServerAdd(self.server_address) # Create", "duration # to download... http_client.SessionDurationSet(self.session_duration) elif self.session_size is not None:", "Method can be GET or PUT # - GET: Standard", "with # the HTTP server 'tcp_port_min': 32000, 'tcp_port_max': 50000 },", "['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'], # TCP port number to be used by", "HTTP connection. 
# On the HTTP server, this will be", "HTTP client (and server) 'tos': 0 } class Example: def", "= kwargs['session_duration'] self.session_size = kwargs['session_size'] self.max_concurrent_sessions = kwargs['max_concurrent_sessions'] self.max_total_sessions =", "many connections can be running at the same time http_client.MaximumConcurrentRequestsSet(self.max_concurrent_sessions)", "10000000000, # session duration, in nanoseconds # Duration of the", "{}\".format(http_client_result.DescriptionGet())) print(\"Server Result data : {}\".format(http_server_result.DescriptionGet())) return [ self.duration, self.session_duration,", "http_client.RemoteAddressSet(http_server_ip_address) http_client.RemotePortSet(server_tcp_port) # - local endpoint http_client.LocalPortRangeSet(self.port_2_config['tcp_port_min'], self.port_2_config['tcp_port_max']) # Configure", "config['ip'] if not isinstance(ip_config, list): # Config is not static,", "\"\"\" # Needed for python2 / python3 print function compatibility", "in bytes # The number of bytes transmitted by a", "prefix_length = ip_config[1] ip = \"{}/{}\".format(address, prefix_length) port_l3.IpManualAdd(ip) config['ip_address'] =", "the HTTP Method to the # enumeration used by the", "port = self.server.PortCreate(config['interface']) port_l2 = port.Layer2EthIISet() port_l2.MacSet(config['mac']) ip_config = config['ip']", "- how many connections can be running at the same", "This approach makes it possible to include it in a", "sessions # 'session_duration': 1500000000, 'session_duration': None, # session size, in", "port.Layer3IPv4Set() port_l3.IpSet(ip_config[0]) port_l3.NetmaskSet(ip_config[1]) port_l3.GatewaySet(ip_config[2]) config['ip_address'] = port_l3.IpGet() elif len(ip_config) ==", "byteblowerll.byteblower as byteblower import time configuration = { # Address", "If the HTTP Method is GET, # traffic will flow", "self.server.PortDestroy(self.port_2) self.port_2 = None if self.server is not None: 
byteblower_instance.ServerRemove(self.server)", "the HTTP client (port_2) self.port_2 = self.provision_port(self.port_2_config) http_server_ip_address = self.port_1_config['ip_address']", "<gh_stars>1-10 \"\"\" HTTP MultiServer/MultiClient for the ByteBlower Python API. All", "else: server_tcp_port = http_server.PortGet() # create a HTTP Client http_client", "connections can be created? http_client.CumulativeConnectionLimitSet(self.max_total_sessions) # - how many connections", "Standard HTTP download, we retrieve data from the web server", "data from the web server # - PUT: Standard HTTP", "ParseHTTPRequestMethodFromString(http_method_arg) self.duration = kwargs['duration'] self.session_duration = kwargs['session_duration'] self.session_size = kwargs['session_size']", "can use this to parse the HTTP Method to the", "# the HTTP server 'tcp_port_min': 32000, 'tcp_port_max': 50000 }, #", "print(\"Creating HTTP Server port\") self.port_1 = self.provision_port(self.port_1_config) print(\"Creating HTTP Client", "port range the HTTP Clients will use to connect with", "tx_speed, rx_speed, request_status_value ] def provision_port(self, config): port = self.server.PortCreate(config['interface'])", "will be created # 0 means no limit 'max_total_sessions': 0,", "Helper function, we can use this to parse the HTTP", "flow from the HTTP server to the HTTP client #", "= None self.port_1 = None self.port_2 = None def cleanup(self):", "'max_concurrent_sessions': 100, # maximum number of sessions # No more", "self.max_total_sessions = kwargs['max_total_sessions'] self.tos = kwargs['tos'] self.server = None self.port_1", "duration, can be size-based or time-based if self.session_duration is not", "# if SLAAC, use \"slaac\" # 'ip': 'slaac', # if", "kwargs['tos'] self.server = None self.port_1 = None self.port_2 = None", "'trunk-1-13', 'mac': '00:bb:01:00:00:01', # IP configuration for the ByteBlower Port.", "'duration': 10000000000, # session duration, in nanoseconds # Duration of", 
"ByteBlower Python API. All examples are guaranteed to work with", "self.provision_port(self.port_1_config) print(\"Creating HTTP Client port\") # Create the port which", "objects\"\"\" byteblower_instance = byteblower.ByteBlower.InstanceGet() if self.port_1: self.server.PortDestroy(self.port_1) self.port_1 = None", "the HTTP Client request a page of a specific size...", "Configuration for the first ByteBlower port. # Will be used", "download, we retrieve data from the web server # -", "= kwargs['port_1_config'] self.port_2_config = kwargs['port_2_config'] # Helper function, we can", "self.server is not None: byteblower_instance.ServerRemove(self.server) self.server = None def run(self):", "return port # When this python module is called stand-alone,", "server listen for requests http_server.Start() # - total duration of", "32000, 'tcp_port_max': 50000 }, # HTTP Method # HTTP Method", "When this duration expires, # all sessions will be stopped.", "server) 'tos': 0 } class Example: def __init__(self, **kwargs): self.server_address", "\"{}/{}\".format(address, prefix_length) port_l3.IpManualAdd(ip) config['ip_address'] = ip_config[0] if not isinstance(config['ip_address'], str):", "http_server_result = http_server.ResultGet() http_server_result.Refresh() print(\"Requested Duration : {} nanoseconds\".format(self.duration)) print(\"Status", "self.duration, self.session_duration, self.session_size, self.max_total_sessions, self.max_concurrent_sessions, tx_bytes, rx_bytes, tx_speed, rx_speed, request_status_value", "# TOS value to use on the HTTP client (and", "# session size, in bytes # The number of bytes", "http_server.Start() # - total duration of all sessions http_client.DurationSet(self.duration) #", "HTTP download, we retrieve data from the web server #", "4096 }, # Configuration for the second ByteBlower port. 
#", "means no limit 'max_total_sessions': 0, # TOS value to use", "The number of bytes transmitted by a session 'session_size': 1", "a specific duration # to download... http_client.SessionDurationSet(self.session_duration) elif self.session_size is", "size... http_client.SessionSizeSet(self.session_size) else: raise ValueError(\"Either duration or request_size must be", "be used as HTTP server. 'port_1_config': { 'interface': 'trunk-1-13', 'mac':", "# total duration, in nanoseconds. # This is the duration", "by the API from byteblowerll.byteblower import ParseHTTPRequestMethodFromString http_method_arg = kwargs['http_method']", "http_server_ip_address = self.port_1_config['ip_address'] # create a HTTP server http_server =", "'ip': 'dhcpv6', # if SLAAC, use \"slaac\" # 'ip': 'slaac',", "download... http_client.SessionDurationSet(self.session_duration) elif self.session_size is not None: # let the", "HTTP client\") request_status_value = http_client.StatusGet() request_status_string = byteblower.ConvertHTTPMultiClientStatusToString(request_status_value) http_client_result.Refresh() tx_bytes", "# Configuration for the first ByteBlower port. # Will be", "print_function # import the ByteBlower module import byteblowerll.byteblower as byteblower", "as HTTP server. 'port_1_config': { 'interface': 'trunk-1-13', 'mac': '00:bb:01:00:00:01', #", "it possible to include it in a series of #", "Server port\") self.port_1 = self.provision_port(self.port_1_config) print(\"Creating HTTP Client port\") #", "'port_2_config': { 'interface': 'trunk-1-25', 'mac': '00:bb:01:00:00:02', # IP configuration for", "# 'ip': ['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'], # TCP port number to be", "with Python 2.7 and above Copyright 2018, Ex<NAME>. 
\"\"\" #", "more than this number of sessions will be created #", "connections established\", http_client_result.ConnectionsEstablishedGet()) print(\" connections aborted\", http_client_result.ConnectionsAbortedGet()) print(\" connections refused\",", "Address (IP or FQDN) of the ByteBlower server to use", "print function compatibility from __future__ import print_function # import the", "Method # HTTP Method can be GET or PUT #", "None, # max concurrent sessions # Maximum number of sessions", "# if DHCPv6, use \"dhcpv6\" # 'ip': 'dhcpv6', # if", "this will be the port on which the server #", "= ip_config[0] prefix_length = ip_config[1] ip = \"{}/{}\".format(address, prefix_length) port_l3.IpManualAdd(ip)", "possible to include it in a series of # examples.", "This is the duration of the flow. When this duration", "'00:bb:01:00:00:02', # IP configuration for the ByteBlower Port. # Options", "wireless endpoint will push data to the # webserver 'http_method':", "'trunk-1-25', 'mac': '00:bb:01:00:00:02', # IP configuration for the ByteBlower Port.", "or time-based if self.session_duration is not None: # let the", "Duration of the individual sessions # 'session_duration': 1500000000, 'session_duration': None,", "size, in bytes # The number of bytes transmitted by", "http_server.ResultGet() http_server_result.Refresh() print(\"Requested Duration : {} nanoseconds\".format(self.duration)) print(\"Status : {}\".format(request_status_string))", "= port_l3.IpDhcpGet() elif ip_config.lower() == \"slaac\": port_l3 = port.Layer3IPv6Set() port_l3.StatelessAutoconfiguration()", "# Configure the direction. 
# If the HTTP Method is", "= http_server.ResultGet() http_server_result.Refresh() print(\"Requested Duration : {} nanoseconds\".format(self.duration)) print(\"Status :", "{ 'interface': 'trunk-1-25', 'mac': '00:bb:01:00:00:02', # IP configuration for the", "port.Layer3IPv6Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpDhcpGet() elif ip_config.lower() == \"slaac\": port_l3", "ip_config.lower() == \"dhcpv6\": port_l3 = port.Layer3IPv6Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpDhcpGet()", "ip.split('/')[0] print(\"Created port\", port.DescriptionGet()) return port # When this python", "same time http_client.MaximumConcurrentRequestsSet(self.max_concurrent_sessions) # - individual duration, can be size-based", "the ByteBlower Python API. All examples are guaranteed to work", "list): # Config is not static, DHCP or slaac if", "# - GET: Standard HTTP download, we retrieve data from", "'session_duration': None, # session size, in bytes # The number", "GET: Standard HTTP download, we retrieve data from the web", "will use to connect with # the HTTP server 'tcp_port_min':", "be created # 0 means no limit 'max_total_sessions': 0, #", "or slaac if ip_config.lower() == \"dhcpv4\": port_l3 = port.Layer3IPv4Set() port_l3.ProtocolDhcpGet().Perform()", "= port_l3.IpStatelessGet() else: # Static configuration if len(ip_config) == 3:", "configuration for the ByteBlower Port. 
# Options are 'DHCPv4', 'DHCPv6',", "self.port_2: self.server.PortDestroy(self.port_2) self.port_2 = None if self.server is not None:", "Method is PUT, # traffic will flow from the HTTP", "will flow from the HTTP client to the HTTP server", ": {}\".format(request_status_string)) print(\"Client Result data : {}\".format(http_client_result.DescriptionGet())) print(\"Server Result data", "to parse the HTTP Method to the # enumeration used", "http_client.Stop() http_server.Stop() print(\"Stopped the HTTP client\") request_status_value = http_client.StatusGet() request_status_string", "self.session_duration, self.session_size, self.max_total_sessions, self.max_concurrent_sessions, tx_bytes, rx_bytes, tx_speed, rx_speed, request_status_value ]", "use on the HTTP client (and server) 'tos': 0 }", "guaranteed to work with Python 2.7 and above Copyright 2018,", "'64'], # TCP port number to be used by the", "connection. # On the HTTP server, this will be the", "be running simultaneously 'max_concurrent_sessions': 100, # maximum number of sessions", "'max_total_sessions': 0, # TOS value to use on the HTTP", "print(\"Creating HTTP Client port\") # Create the port which will", "http_server_result.Refresh() print(\"Requested Duration : {} nanoseconds\".format(self.duration)) print(\"Status : {}\".format(request_status_string)) print(\"Client", "Result data : {}\".format(http_server_result.DescriptionGet())) return [ self.duration, self.session_duration, self.session_size, self.max_total_sessions,", "be stopped. 'duration': 10000000000, # session duration, in nanoseconds #", "http_client_result.ConnectionsEstablishedGet()) print(\" connections aborted\", http_client_result.ConnectionsAbortedGet()) print(\" connections refused\", http_client_result.ConnectionsRefusedGet()) print(\"-\"", "'DHCPv4', 'DHCPv6', 'SLAAC', 'static' # if DHCPv4, use \"dhcpv4\" 'ip':", "ByteBlower port. # Will be used as HTTP client. 
'port_2_config':", "individual sessions # 'session_duration': 1500000000, 'session_duration': None, # session size,", "'64'], # TCP port range the HTTP Clients will use", "called. This approach makes it possible to include it in", "= byteblower.ConvertHTTPMultiClientStatusToString(request_status_value) http_client_result.Refresh() tx_bytes = http_client_result.TcpTxByteCountGet() tx_speed = http_client_result.TcpTxSpeedGet() rx_bytes", "traffic will flow from the HTTP server to the HTTP", "= self.port_1_config['ip_address'] # create a HTTP server http_server = self.port_1.ProtocolHttpMultiServerAdd()", "duration expires, # all sessions will be stopped. 'duration': 10000000000,", "request_status_string = byteblower.ConvertHTTPMultiClientStatusToString(request_status_value) http_client_result.Refresh() tx_bytes = http_client_result.TcpTxByteCountGet() tx_speed = http_client_result.TcpTxSpeedGet()", "address = ip_config[0] prefix_length = ip_config[1] ip = \"{}/{}\".format(address, prefix_length)", "byteblower_instance.ServerRemove(self.server) self.server = None def run(self): byteblower_instance = byteblower.ByteBlower.InstanceGet() print(\"Connecting", "endpoint will push data to the # webserver 'http_method': 'GET',", "http_client.SessionDurationSet(self.session_duration) elif self.session_size is not None: # let the HTTP", "len(ip_config) == 2: port_l3 = port.Layer3IPv6Set() # IPv6 address =", "['192.168.0.2', \"255.255.255.0\", \"192.168.0.1\"], # if staticv6, use [\"ipaddress\", prefixlength] #", "# - PUT: Standard HTTP upload, the wireless endpoint will", "HTTP Method to the # enumeration used by the API", "# 'session_duration': 1500000000, 'session_duration': None, # session size, in bytes", "[\"ipaddress\", netmask, gateway] # 'ip': ['192.168.0.2', \"255.255.255.0\", \"192.168.0.1\"], # if", "to connect with # the HTTP server 'tcp_port_min': 32000, 'tcp_port_max':", "simultaneously 'max_concurrent_sessions': 100, # maximum number of sessions # No", "to the 
HTTP server http_client.HttpMethodSet(self.http_method) print(\"Server port:\", self.port_1.DescriptionGet()) print(\"Client port:\",", "http_client_result.TcpRxSpeedGet() http_server_result = http_server.ResultGet() http_server_result.Refresh() print(\"Requested Duration : {} nanoseconds\".format(self.duration))", "of all sessions http_client.DurationSet(self.duration) # - how many connections can", "port_l3.NetmaskSet(ip_config[1]) port_l3.GatewaySet(ip_config[2]) config['ip_address'] = port_l3.IpGet() elif len(ip_config) == 2: port_l3", "will be the HTTP server (port_1) print(\"Creating HTTP Server port\")", "* 1000, # 'session_size': None, # max concurrent sessions #", "\"\"\" HTTP MultiServer/MultiClient for the ByteBlower Python API. All examples", "http_server.PortSet(server_tcp_port) else: server_tcp_port = http_server.PortGet() # create a HTTP Client", "ip_config[1] ip = \"{}/{}\".format(address, prefix_length) port_l3.IpManualAdd(ip) config['ip_address'] = ip_config[0] if", "makes it possible to include it in a series of", "a series of # examples. if __name__ == \"__main__\": example", "ip_config[0] if not isinstance(config['ip_address'], str): ip = config['ip_address'][0] if '/'", "= port.Layer3IPv6Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpDhcpGet() elif ip_config.lower() == \"slaac\":", "# Create the port which will be the HTTP server", "= http_client_result.TcpTxByteCountGet() tx_speed = http_client_result.TcpTxSpeedGet() rx_bytes = http_client_result.TcpRxByteCountGet() rx_speed =", "ip = config['ip_address'][0] if '/' in ip: config['ip_address'] = ip.split('/')[0]", "client (and server) 'tos': 0 } class Example: def __init__(self,", "self.tos = kwargs['tos'] self.server = None self.port_1 = None self.port_2", "which the server # listens. 'tcp_port': 4096 }, # Configuration", "self.port_2_config['tcp_port_max']) # Configure the direction. 
# If the HTTP Method", "server to use 'server_address': 'byteblower-tp-1300.lab.byteblower.excentis.com', # Configuration for the first", "use 'server_address': 'byteblower-tp-1300.lab.byteblower.excentis.com', # Configuration for the first ByteBlower port.", "local endpoint http_client.LocalPortRangeSet(self.port_2_config['tcp_port_min'], self.port_2_config['tcp_port_max']) # Configure the direction. # If", "server (port_1) print(\"Creating HTTP Server port\") self.port_1 = self.provision_port(self.port_1_config) print(\"Creating", "if self.session_duration is not None: # let the HTTP Client", "can be size-based or time-based if self.session_duration is not None:", "of the flow. When this duration expires, # all sessions", "staticv6, use [\"ipaddress\", prefixlength] # 'ip': ['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'], # TCP", "if staticv6, use [\"ipaddress\", prefixlength] # 'ip': ['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'], #", "Config is not static, DHCP or slaac if ip_config.lower() ==", "# webserver 'http_method': 'GET', # 'http_method': 'PUT', # total duration,", "= ParseHTTPRequestMethodFromString(http_method_arg) self.duration = kwargs['duration'] self.session_duration = kwargs['session_duration'] self.session_size =", "which will be the HTTP server (port_1) print(\"Creating HTTP Server", "print(\"Connecting to ByteBlower server %s...\" % self.server_address) self.server = byteblower_instance.ServerAdd(self.server_address)", "# On the HTTP server, this will be the port", "# Address (IP or FQDN) of the ByteBlower server to", "client\") request_status_value = http_client.StatusGet() request_status_string = byteblower.ConvertHTTPMultiClientStatusToString(request_status_value) http_client_result.Refresh() tx_bytes =", "staticv4, use [\"ipaddress\", netmask, gateway] # 'ip': ['192.168.0.2', \"255.255.255.0\", \"192.168.0.1\"],", "value to use on the HTTP client (and server) 'tos':", "on the HTTP client (and server) 'tos': 0 } class", "created # 0 
means no limit 'max_total_sessions': 0, # TOS", "print(\"Stopped the HTTP client\") request_status_value = http_client.StatusGet() request_status_string = byteblower.ConvertHTTPMultiClientStatusToString(request_status_value)", "None if self.server is not None: byteblower_instance.ServerRemove(self.server) self.server = None", "'static' # if DHCPv4, use \"dhcpv4\" 'ip': 'dhcpv4', # if", "# traffic will flow from the HTTP client to the", "duration or request_size must be configured\") print(\"Starting the HTTP client\")", "client (port_2) self.port_2 = self.provision_port(self.port_2_config) http_server_ip_address = self.port_1_config['ip_address'] # create", "self.max_total_sessions, self.max_concurrent_sessions, tx_bytes, rx_bytes, tx_speed, rx_speed, request_status_value ] def provision_port(self,", "iteration+1) print(\" connections attempted\", http_client_result.ConnectionsAttemptedGet()) print(\" connections established\", http_client_result.ConnectionsEstablishedGet()) print(\"", "port_l3 = port.Layer3IPv6Set() # IPv6 address = ip_config[0] prefix_length =", "0 means no limit 'max_total_sessions': 0, # TOS value to", "self.max_concurrent_sessions = kwargs['max_concurrent_sessions'] self.max_total_sessions = kwargs['max_total_sessions'] self.tos = kwargs['tos'] self.server", "the # enumeration used by the API from byteblowerll.byteblower import", "a page of a specific size... http_client.SessionSizeSet(self.session_size) else: raise ValueError(\"Either", "be configured\") print(\"Starting the HTTP client\") http_client.Start() http_client_result = http_client.ResultGet()", "HTTP server. 'port_1_config': { 'interface': 'trunk-1-13', 'mac': '00:bb:01:00:00:01', # IP", "python3 print function compatibility from __future__ import print_function # import", "not isinstance(config['ip_address'], str): ip = config['ip_address'][0] if '/' in ip:", "stopped. 
'duration': 10000000000, # session duration, in nanoseconds # Duration", "# Needed for python2 / python3 print function compatibility from", "# Helper function, we can use this to parse the", "== \"dhcpv6\": port_l3 = port.Layer3IPv6Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpDhcpGet() elif", "self.server = None def run(self): byteblower_instance = byteblower.ByteBlower.InstanceGet() print(\"Connecting to", "= kwargs['session_size'] self.max_concurrent_sessions = kwargs['max_concurrent_sessions'] self.max_total_sessions = kwargs['max_total_sessions'] self.tos =", "GET, # traffic will flow from the HTTP server to", "page of a specific size... http_client.SessionSizeSet(self.session_size) else: raise ValueError(\"Either duration", "the HTTP Clients will use to connect with # the", "of a specific size... http_client.SessionSizeSet(self.session_size) else: raise ValueError(\"Either duration or", "kwargs['session_size'] self.max_concurrent_sessions = kwargs['max_concurrent_sessions'] self.max_total_sessions = kwargs['max_total_sessions'] self.tos = kwargs['tos']", "= port.Layer3IPv6Set() # IPv6 address = ip_config[0] prefix_length = ip_config[1]", "(and server) 'tos': 0 } class Example: def __init__(self, **kwargs):", "is GET, # traffic will flow from the HTTP server", "return [ self.duration, self.session_duration, self.session_size, self.max_total_sessions, self.max_concurrent_sessions, tx_bytes, rx_bytes, tx_speed,", "the ByteBlower module import byteblowerll.byteblower as byteblower import time configuration", "import byteblowerll.byteblower as byteblower import time configuration = { #", "\"slaac\" # 'ip': 'slaac', # if staticv4, use [\"ipaddress\", netmask,", "rx_bytes, tx_speed, rx_speed, request_status_value ] def provision_port(self, config): port =", "'session_duration': 1500000000, 'session_duration': None, # session size, in bytes #", "'ip': ['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'], # TCP port number to be used", "HTTP 
Method # HTTP Method can be GET or PUT", "= http_client_result.TcpTxSpeedGet() rx_bytes = http_client_result.TcpRxByteCountGet() rx_speed = http_client_result.TcpRxSpeedGet() http_server_result =", "print(\"Created port\", port.DescriptionGet()) return port # When this python module", "let the HTTP server listen for requests http_server.Start() # -", "http_server = self.port_1.ProtocolHttpMultiServerAdd() server_tcp_port = self.port_1_config['tcp_port'] if server_tcp_port is not", "self.session_size is not None: # let the HTTP Client request", "http_client_result.TcpTxSpeedGet() rx_bytes = http_client_result.TcpRxByteCountGet() rx_speed = http_client_result.TcpRxSpeedGet() http_server_result = http_server.ResultGet()", "config['ip_address'] = port_l3.IpStatelessGet() else: # Static configuration if len(ip_config) ==", "web server # - PUT: Standard HTTP upload, the wireless", "enumeration used by the API from byteblowerll.byteblower import ParseHTTPRequestMethodFromString http_method_arg", "\"dhcpv6\": port_l3 = port.Layer3IPv6Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpDhcpGet() elif ip_config.lower()", "\"dhcpv4\" 'ip': 'dhcpv4', # if DHCPv6, use \"dhcpv6\" # 'ip':", "all sessions will be stopped. 'duration': 10000000000, # session duration,", "# 'session_size': None, # max concurrent sessions # Maximum number", "the # webserver 'http_method': 'GET', # 'http_method': 'PUT', # total", "# session duration, in nanoseconds # Duration of the individual", "# 'ip': 'dhcpv6', # if SLAAC, use \"slaac\" # 'ip':", "flow. 
When this duration expires, # all sessions will be", "how many connections can be running at the same time", "Result data : {}\".format(http_client_result.DescriptionGet())) print(\"Server Result data : {}\".format(http_server_result.DescriptionGet())) return", "self.session_size = kwargs['session_size'] self.max_concurrent_sessions = kwargs['max_concurrent_sessions'] self.max_total_sessions = kwargs['max_total_sessions'] self.tos", "HTTP Method is GET, # traffic will flow from the", "server_tcp_port = self.port_1_config['tcp_port'] if server_tcp_port is not None: http_server.PortSet(server_tcp_port) else:", ": {}\".format(http_server_result.DescriptionGet())) return [ self.duration, self.session_duration, self.session_size, self.max_total_sessions, self.max_concurrent_sessions, tx_bytes,", "or PUT # - GET: Standard HTTP download, we retrieve", "maximum number of sessions # No more than this number", "range(10): time.sleep(1) http_client_result.Refresh() print(\"-\" * 10) print(\"Iteration\", iteration+1) print(\" connections", "port_l3.IpStatelessGet() else: # Static configuration if len(ip_config) == 3: #", "gateway] # 'ip': ['192.168.0.2', \"255.255.255.0\", \"192.168.0.1\"], # if staticv6, use", "the web server # - PUT: Standard HTTP upload, the", "http_client.CumulativeConnectionLimitSet(self.max_total_sessions) # - how many connections can be running at", "HTTP Client port\") # Create the port which will be", "specific size... 
http_client.SessionSizeSet(self.session_size) else: raise ValueError(\"Either duration or request_size must", "can be running at the same time http_client.MaximumConcurrentRequestsSet(self.max_concurrent_sessions) # -", "IPv6 address = ip_config[0] prefix_length = ip_config[1] ip = \"{}/{}\".format(address,", "# Maximum number of sessions that will be running simultaneously", "= None if self.port_2: self.server.PortDestroy(self.port_2) self.port_2 = None if self.server", "if self.server is not None: byteblower_instance.ServerRemove(self.server) self.server = None def", "__future__ import print_function # import the ByteBlower module import byteblowerll.byteblower", "port. # Will be used as HTTP client. 'port_2_config': {", "be the HTTP client (port_2) self.port_2 = self.provision_port(self.port_2_config) http_server_ip_address =", "ip_config.lower() == \"dhcpv4\": port_l3 = port.Layer3IPv4Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpGet()", "of # examples. if __name__ == \"__main__\": example = Example(**configuration)", "HTTP server, this will be the port on which the", "\"192.168.0.1\"], # if staticv6, use [\"ipaddress\", prefixlength] # 'ip': ['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b',", "self.duration = kwargs['duration'] self.session_duration = kwargs['session_duration'] self.session_size = kwargs['session_size'] self.max_concurrent_sessions", "If the HTTP Method is PUT, # traffic will flow", "created? 
http_client.CumulativeConnectionLimitSet(self.max_total_sessions) # - how many connections can be running", "bytes transmitted by a session 'session_size': 1 * 1000 *", "HTTP Clients will use to connect with # the HTTP", "HTTP client to the HTTP server http_client.HttpMethodSet(self.http_method) print(\"Server port:\", self.port_1.DescriptionGet())", "# if DHCPv6, use \"dhcpv6\" # ip': 'dhcpv6', # if", "= kwargs['max_concurrent_sessions'] self.max_total_sessions = kwargs['max_total_sessions'] self.tos = kwargs['tos'] self.server =", "} class Example: def __init__(self, **kwargs): self.server_address = kwargs['server_address'] self.port_1_config", "the server # listens. 'tcp_port': 4096 }, # Configuration for", "if DHCPv4, use \"dhcpv4\" 'ip': 'dhcpv4', # if DHCPv6, use", "the port which will be the HTTP server (port_1) print(\"Creating", "configured\") print(\"Starting the HTTP client\") http_client.Start() http_client_result = http_client.ResultGet() for", "session duration, in nanoseconds # Duration of the individual sessions", "for the ByteBlower Python API. 
All examples are guaranteed to", "None def cleanup(self): \"\"\"Clean up the created objects\"\"\" byteblower_instance =", "\"dhcpv4\" 'ip': 'dhcpv4', # if DHCPv6, use \"dhcpv6\" # ip':", "# No more than this number of sessions will be", "= kwargs['port_2_config'] # Helper function, we can use this to", "config): port = self.server.PortCreate(config['interface']) port_l2 = port.Layer2EthIISet() port_l2.MacSet(config['mac']) ip_config =", "iteration in range(10): time.sleep(1) http_client_result.Refresh() print(\"-\" * 10) print(\"Iteration\", iteration+1)", "of the individual sessions # 'session_duration': 1500000000, 'session_duration': None, #", "the individual sessions # 'session_duration': 1500000000, 'session_duration': None, # session", "= kwargs['http_method'] self.http_method = ParseHTTPRequestMethodFromString(http_method_arg) self.duration = kwargs['duration'] self.session_duration =", "HTTP Method is PUT, # traffic will flow from the", "None, # session size, in bytes # The number of", "server_tcp_port is not None: http_server.PortSet(server_tcp_port) else: server_tcp_port = http_server.PortGet() #", "None: byteblower_instance.ServerRemove(self.server) self.server = None def run(self): byteblower_instance = byteblower.ByteBlower.InstanceGet()", "'ip': ['192.168.0.2', \"255.255.255.0\", \"192.168.0.1\"], # if staticv6, use [\"ipaddress\", prefixlength]", "= http_client_result.TcpRxSpeedGet() http_server_result = http_server.ResultGet() http_server_result.Refresh() print(\"Requested Duration : {}", "- local endpoint http_client.LocalPortRangeSet(self.port_2_config['tcp_port_min'], self.port_2_config['tcp_port_max']) # Configure the direction. 
#", "3: # IPv4 port_l3 = port.Layer3IPv4Set() port_l3.IpSet(ip_config[0]) port_l3.NetmaskSet(ip_config[1]) port_l3.GatewaySet(ip_config[2]) config['ip_address']", "# Options are 'DHCPv4', 'DHCPv6', 'SLAAC', 'static' # if DHCPv4,", "2: port_l3 = port.Layer3IPv6Set() # IPv6 address = ip_config[0] prefix_length", "self.server.PortCreate(config['interface']) port_l2 = port.Layer2EthIISet() port_l2.MacSet(config['mac']) ip_config = config['ip'] if not", "API from byteblowerll.byteblower import ParseHTTPRequestMethodFromString http_method_arg = kwargs['http_method'] self.http_method =", "isinstance(ip_config, list): # Config is not static, DHCP or slaac", "def cleanup(self): \"\"\"Clean up the created objects\"\"\" byteblower_instance = byteblower.ByteBlower.InstanceGet()", "connections refused\", http_client_result.ConnectionsRefusedGet()) print(\"-\" * 10) http_client.Stop() http_server.Stop() print(\"Stopped the", "client. 'port_2_config': { 'interface': 'trunk-1-25', 'mac': '00:bb:01:00:00:02', # IP configuration", "bytes # The number of bytes transmitted by a session", "PUT # - GET: Standard HTTP download, we retrieve data", "limit 'max_total_sessions': 0, # TOS value to use on the", "def run(self): byteblower_instance = byteblower.ByteBlower.InstanceGet() print(\"Connecting to ByteBlower server %s...\"", "the HTTP Client request a page of a specific duration", "http_client.SessionSizeSet(self.session_size) else: raise ValueError(\"Either duration or request_size must be configured\")", "= self.provision_port(self.port_2_config) http_server_ip_address = self.port_1_config['ip_address'] # create a HTTP server", "# This is the duration of the flow. When this", "include it in a series of # examples. 
if __name__", "Options are 'DHCPv4', 'DHCPv6', 'SLAAC', 'static' # if DHCPv4, use", "# create a HTTP Client http_client = self.port_2.ProtocolHttpMultiClientAdd() # -", "if len(ip_config) == 3: # IPv4 port_l3 = port.Layer3IPv4Set() port_l3.IpSet(ip_config[0])", "kwargs['port_1_config'] self.port_2_config = kwargs['port_2_config'] # Helper function, we can use", "will be stopped. 'duration': 10000000000, # session duration, in nanoseconds", "print(\" connections aborted\", http_client_result.ConnectionsAbortedGet()) print(\" connections refused\", http_client_result.ConnectionsRefusedGet()) print(\"-\" *", "# listens. 'tcp_port': 4096 }, # Configuration for the second", "endpoint http_client.RemoteAddressSet(http_server_ip_address) http_client.RemotePortSet(server_tcp_port) # - local endpoint http_client.LocalPortRangeSet(self.port_2_config['tcp_port_min'], self.port_2_config['tcp_port_max']) #", "listen for requests http_server.Start() # - total duration of all", "All examples are guaranteed to work with Python 2.7 and", "used by the API from byteblowerll.byteblower import ParseHTTPRequestMethodFromString http_method_arg =", "# Configuration for the second ByteBlower port. 
# Will be", "self.session_duration = kwargs['session_duration'] self.session_size = kwargs['session_size'] self.max_concurrent_sessions = kwargs['max_concurrent_sessions'] self.max_total_sessions", "sessions that will be running simultaneously 'max_concurrent_sessions': 100, # maximum", "http_client_result.Refresh() tx_bytes = http_client_result.TcpTxByteCountGet() tx_speed = http_client_result.TcpTxSpeedGet() rx_bytes = http_client_result.TcpRxByteCountGet()", "DHCPv6, use \"dhcpv6\" # 'ip': 'dhcpv6', # if SLAAC, use", "= config['ip_address'][0] if '/' in ip: config['ip_address'] = ip.split('/')[0] print(\"Created", "config['ip_address'][0] if '/' in ip: config['ip_address'] = ip.split('/')[0] print(\"Created port\",", "'port_1_config': { 'interface': 'trunk-1-13', 'mac': '00:bb:01:00:00:01', # IP configuration for", "http_method_arg = kwargs['http_method'] self.http_method = ParseHTTPRequestMethodFromString(http_method_arg) self.duration = kwargs['duration'] self.session_duration", "# 'ip': ['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'], # TCP port range the HTTP", "HTTP server http_client.HttpMethodSet(self.http_method) print(\"Server port:\", self.port_1.DescriptionGet()) print(\"Client port:\", self.port_2.DescriptionGet()) #", "how many connections can be created? http_client.CumulativeConnectionLimitSet(self.max_total_sessions) # - how", "ip_config[0] prefix_length = ip_config[1] ip = \"{}/{}\".format(address, prefix_length) port_l3.IpManualAdd(ip) config['ip_address']", "Python API. 
All examples are guaranteed to work with Python", "# IPv4 port_l3 = port.Layer3IPv4Set() port_l3.IpSet(ip_config[0]) port_l3.NetmaskSet(ip_config[1]) port_l3.GatewaySet(ip_config[2]) config['ip_address'] =", "in range(10): time.sleep(1) http_client_result.Refresh() print(\"-\" * 10) print(\"Iteration\", iteration+1) print(\"", "= port.Layer3IPv4Set() port_l3.IpSet(ip_config[0]) port_l3.NetmaskSet(ip_config[1]) port_l3.GatewaySet(ip_config[2]) config['ip_address'] = port_l3.IpGet() elif len(ip_config)", "'http_method': 'GET', # 'http_method': 'PUT', # total duration, in nanoseconds.", "Configure the direction. # If the HTTP Method is GET,", "as byteblower import time configuration = { # Address (IP", "Static configuration if len(ip_config) == 3: # IPv4 port_l3 =", "import the ByteBlower module import byteblowerll.byteblower as byteblower import time", "a HTTP Client http_client = self.port_2.ProtocolHttpMultiClientAdd() # - remote endpoint", "ip = \"{}/{}\".format(address, prefix_length) port_l3.IpManualAdd(ip) config['ip_address'] = ip_config[0] if not", "ip: config['ip_address'] = ip.split('/')[0] print(\"Created port\", port.DescriptionGet()) return port #", "for requests http_server.Start() # - total duration of all sessions", "nanoseconds. # This is the duration of the flow. When", "'ip': ['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'], # TCP port range the HTTP Clients", "# HTTP Method # HTTP Method can be GET or", "'DHCPv6', 'SLAAC', 'static' # if DHCPv4, use \"dhcpv4\" 'ip': 'dhcpv4',", "we retrieve data from the web server # - PUT:", "port. # Will be used as HTTP server. 'port_1_config': {", "100, # maximum number of sessions # No more than", "On the HTTP server, this will be the port on", "Ex<NAME>. 
\"\"\" # Needed for python2 / python3 print function", "parse the HTTP Method to the # enumeration used by", "def __init__(self, **kwargs): self.server_address = kwargs['server_address'] self.port_1_config = kwargs['port_1_config'] self.port_2_config", "# Duration of the individual sessions # 'session_duration': 1500000000, 'session_duration':", "kwargs['server_address'] self.port_1_config = kwargs['port_1_config'] self.port_2_config = kwargs['port_2_config'] # Helper function,", "2.7 and above Copyright 2018, Ex<NAME>. \"\"\" # Needed for", "HTTP client (port_2) self.port_2 = self.provision_port(self.port_2_config) http_server_ip_address = self.port_1_config['ip_address'] #", "elif self.session_size is not None: # let the HTTP Client", "port\") self.port_1 = self.provision_port(self.port_1_config) print(\"Creating HTTP Client port\") # Create", "print(\"-\" * 10) print(\"Iteration\", iteration+1) print(\" connections attempted\", http_client_result.ConnectionsAttemptedGet()) print(\"", "\"dhcpv4\": port_l3 = port.Layer3IPv4Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpGet() elif ip_config.lower()", "else: # Static configuration if len(ip_config) == 3: # IPv4", "server # listens. 'tcp_port': 4096 }, # Configuration for the", "use \"dhcpv4\" 'ip': 'dhcpv4', # if DHCPv6, use \"dhcpv6\" #", "be the HTTP server (port_1) print(\"Creating HTTP Server port\") self.port_1", "sessions will be stopped. 'duration': 10000000000, # session duration, in", "request a page of a specific size... http_client.SessionSizeSet(self.session_size) else: raise", "] def provision_port(self, config): port = self.server.PortCreate(config['interface']) port_l2 = port.Layer2EthIISet()", "specific duration # to download... http_client.SessionDurationSet(self.session_duration) elif self.session_size is not", "a page of a specific duration # to download... 
http_client.SessionDurationSet(self.session_duration)", "'interface': 'trunk-1-25', 'mac': '00:bb:01:00:00:02', # IP configuration for the ByteBlower", "byteblower import time configuration = { # Address (IP or", "# TCP port range the HTTP Clients will use to", "range the HTTP Clients will use to connect with #", "port_l3.IpGet() elif ip_config.lower() == \"dhcpv6\": port_l3 = port.Layer3IPv6Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address']", "the HTTP client (and server) 'tos': 0 } class Example:", "# examples. if __name__ == \"__main__\": example = Example(**configuration) try:", "to the HTTP client # If the HTTP Method is", "configuration = { # Address (IP or FQDN) of the", "if server_tcp_port is not None: http_server.PortSet(server_tcp_port) else: server_tcp_port = http_server.PortGet()", "in nanoseconds # Duration of the individual sessions # 'session_duration':", "than this number of sessions will be created # 0", "# if DHCPv4, use \"dhcpv4\" 'ip': 'dhcpv4', # if DHCPv6,", "to be used by the HTTP connection. # On the", "cleanup(self): \"\"\"Clean up the created objects\"\"\" byteblower_instance = byteblower.ByteBlower.InstanceGet() if", "ip_config = config['ip'] if not isinstance(ip_config, list): # Config is", "= self.port_2.ProtocolHttpMultiClientAdd() # - remote endpoint http_client.RemoteAddressSet(http_server_ip_address) http_client.RemotePortSet(server_tcp_port) # -", "if __name__ == \"__main__\": example = Example(**configuration) try: example.run() finally:", "HTTP client\") http_client.Start() http_client_result = http_client.ResultGet() for iteration in range(10):", "used as HTTP client. 
'port_2_config': { 'interface': 'trunk-1-25', 'mac': '00:bb:01:00:00:02',", "# The number of bytes transmitted by a session 'session_size':", "the HTTP client\") request_status_value = http_client.StatusGet() request_status_string = byteblower.ConvertHTTPMultiClientStatusToString(request_status_value) http_client_result.Refresh()", "if self.port_1: self.server.PortDestroy(self.port_1) self.port_1 = None if self.port_2: self.server.PortDestroy(self.port_2) self.port_2", "of sessions that will be running simultaneously 'max_concurrent_sessions': 100, #", "the direction. # If the HTTP Method is GET, #", "the HTTP client to the HTTP server http_client.HttpMethodSet(self.http_method) print(\"Server port:\",", "the duration of the flow. When this duration expires, #", "port # When this python module is called stand-alone, the", "kwargs['port_2_config'] # Helper function, we can use this to parse", "prefix_length) port_l3.IpManualAdd(ip) config['ip_address'] = ip_config[0] if not isinstance(config['ip_address'], str): ip", "client to the HTTP server http_client.HttpMethodSet(self.http_method) print(\"Server port:\", self.port_1.DescriptionGet()) print(\"Client", "print(\"Server port:\", self.port_1.DescriptionGet()) print(\"Client port:\", self.port_2.DescriptionGet()) # let the HTTP", "'server_address': 'byteblower-tp-1300.lab.byteblower.excentis.com', # Configuration for the first ByteBlower port. 
#", "* 1000 * 1000, # 'session_size': None, # max concurrent", "this to parse the HTTP Method to the # enumeration", "port which will be the HTTP server (port_1) print(\"Creating HTTP", "# When this python module is called stand-alone, the run-function", "the HTTP Method is GET, # traffic will flow from", "connections aborted\", http_client_result.ConnectionsAbortedGet()) print(\" connections refused\", http_client_result.ConnectionsRefusedGet()) print(\"-\" * 10)", "max concurrent sessions # Maximum number of sessions that will", ": {} nanoseconds\".format(self.duration)) print(\"Status : {}\".format(request_status_string)) print(\"Client Result data :", "print(\"Iteration\", iteration+1) print(\" connections attempted\", http_client_result.ConnectionsAttemptedGet()) print(\" connections established\", http_client_result.ConnectionsEstablishedGet())", "established\", http_client_result.ConnectionsEstablishedGet()) print(\" connections aborted\", http_client_result.ConnectionsAbortedGet()) print(\" connections refused\", http_client_result.ConnectionsRefusedGet())", "When this python module is called stand-alone, the run-function must", "data : {}\".format(http_client_result.DescriptionGet())) print(\"Server Result data : {}\".format(http_server_result.DescriptionGet())) return [", "2018, Ex<NAME>. \"\"\" # Needed for python2 / python3 print", "TCP port range the HTTP Clients will use to connect", "http_client_result.TcpTxByteCountGet() tx_speed = http_client_result.TcpTxSpeedGet() rx_bytes = http_client_result.TcpRxByteCountGet() rx_speed = http_client_result.TcpRxSpeedGet()", "self.server.PortDestroy(self.port_1) self.port_1 = None if self.port_2: self.server.PortDestroy(self.port_2) self.port_2 = None", "# create a HTTP server http_server = self.port_1.ProtocolHttpMultiServerAdd() server_tcp_port =", "1000 * 1000, # 'session_size': None, # max concurrent sessions", "# If the HTTP Method is GET, # traffic will", "# to download... 
http_client.SessionDurationSet(self.session_duration) elif self.session_size is not None: #", "10) print(\"Iteration\", iteration+1) print(\" connections attempted\", http_client_result.ConnectionsAttemptedGet()) print(\" connections established\",", "0, # TOS value to use on the HTTP client", "'interface': 'trunk-1-13', 'mac': '00:bb:01:00:00:01', # IP configuration for the ByteBlower", "self.server_address) self.server = byteblower_instance.ServerAdd(self.server_address) # Create the port which will", "= self.server.PortCreate(config['interface']) port_l2 = port.Layer2EthIISet() port_l2.MacSet(config['mac']) ip_config = config['ip'] if", "slaac if ip_config.lower() == \"dhcpv4\": port_l3 = port.Layer3IPv4Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address']", "server %s...\" % self.server_address) self.server = byteblower_instance.ServerAdd(self.server_address) # Create the", "HTTP Client http_client = self.port_2.ProtocolHttpMultiClientAdd() # - remote endpoint http_client.RemoteAddressSet(http_server_ip_address)", "http_client.ResultGet() for iteration in range(10): time.sleep(1) http_client_result.Refresh() print(\"-\" * 10)", "examples. if __name__ == \"__main__\": example = Example(**configuration) try: example.run()", "number to be used by the HTTP connection. # On", "will be the port on which the server # listens.", "print(\"Starting the HTTP client\") http_client.Start() http_client_result = http_client.ResultGet() for iteration", "\"\"\"Clean up the created objects\"\"\" byteblower_instance = byteblower.ByteBlower.InstanceGet() if self.port_1:", "HTTP client. 
'port_2_config': { 'interface': 'trunk-1-25', 'mac': '00:bb:01:00:00:02', # IP", "from byteblowerll.byteblower import ParseHTTPRequestMethodFromString http_method_arg = kwargs['http_method'] self.http_method = ParseHTTPRequestMethodFromString(http_method_arg)", "* 10) print(\"Iteration\", iteration+1) print(\" connections attempted\", http_client_result.ConnectionsAttemptedGet()) print(\" connections", "data to the # webserver 'http_method': 'GET', # 'http_method': 'PUT',", "examples are guaranteed to work with Python 2.7 and above", "it in a series of # examples. if __name__ ==", "%s...\" % self.server_address) self.server = byteblower_instance.ServerAdd(self.server_address) # Create the port", "second ByteBlower port. # Will be used as HTTP client.", "not isinstance(ip_config, list): # Config is not static, DHCP or", "be the port on which the server # listens. 'tcp_port':", "HTTP server 'tcp_port_min': 32000, 'tcp_port_max': 50000 }, # HTTP Method", "and above Copyright 2018, Ex<NAME>. \"\"\" # Needed for python2", "transmitted by a session 'session_size': 1 * 1000 * 1000,", "on which the server # listens. 
'tcp_port': 4096 }, #", "this number of sessions will be created # 0 means", "if DHCPv6, use \"dhcpv6\" # 'ip': 'dhcpv6', # if SLAAC,", "upload, the wireless endpoint will push data to the #", "self.port_2 = None def cleanup(self): \"\"\"Clean up the created objects\"\"\"", "import print_function # import the ByteBlower module import byteblowerll.byteblower as", "provision_port(self, config): port = self.server.PortCreate(config['interface']) port_l2 = port.Layer2EthIISet() port_l2.MacSet(config['mac']) ip_config", "# import the ByteBlower module import byteblowerll.byteblower as byteblower import", "time-based if self.session_duration is not None: # let the HTTP", "self.session_size, self.max_total_sessions, self.max_concurrent_sessions, tx_bytes, rx_bytes, tx_speed, rx_speed, request_status_value ] def", "'00:bb:01:00:00:01', # IP configuration for the ByteBlower Port. # Options", "'slaac', # if staticv4, use [\"ipaddress\", netmask, gateway] # 'ip':", "# 'ip': 'slaac', # if staticv4, use [\"ipaddress\", netmask, gateway]", "port\") # Create the port which will be the HTTP", "from the HTTP client to the HTTP server http_client.HttpMethodSet(self.http_method) print(\"Server", "else: raise ValueError(\"Either duration or request_size must be configured\") print(\"Starting", "http_client.HttpMethodSet(self.http_method) print(\"Server port:\", self.port_1.DescriptionGet()) print(\"Client port:\", self.port_2.DescriptionGet()) # let the", "print(\"Client Result data : {}\".format(http_client_result.DescriptionGet())) print(\"Server Result data : {}\".format(http_server_result.DescriptionGet()))", "the created objects\"\"\" byteblower_instance = byteblower.ByteBlower.InstanceGet() if self.port_1: self.server.PortDestroy(self.port_1) self.port_1", "for iteration in range(10): time.sleep(1) http_client_result.Refresh() print(\"-\" * 10) print(\"Iteration\",", "Will be used as HTTP client. 'port_2_config': { 'interface': 'trunk-1-25',", "for the ByteBlower Port. 
# Options are 'DHCPv4', 'DHCPv6', 'SLAAC',", "sessions # Maximum number of sessions that will be running", "running simultaneously 'max_concurrent_sessions': 100, # maximum number of sessions #", "1000, # 'session_size': None, # max concurrent sessions # Maximum", "HTTP server (port_1) print(\"Creating HTTP Server port\") self.port_1 = self.provision_port(self.port_1_config)", "* 10) http_client.Stop() http_server.Stop() print(\"Stopped the HTTP client\") request_status_value =", "# traffic will flow from the HTTP server to the", "this duration expires, # all sessions will be stopped. 'duration':", "self.port_1.ProtocolHttpMultiServerAdd() server_tcp_port = self.port_1_config['tcp_port'] if server_tcp_port is not None: http_server.PortSet(server_tcp_port)", "# - how many connections can be running at the", "self.port_1_config = kwargs['port_1_config'] self.port_2_config = kwargs['port_2_config'] # Helper function, we", "# - local endpoint http_client.LocalPortRangeSet(self.port_2_config['tcp_port_min'], self.port_2_config['tcp_port_max']) # Configure the direction.", "byteblowerll.byteblower import ParseHTTPRequestMethodFromString http_method_arg = kwargs['http_method'] self.http_method = ParseHTTPRequestMethodFromString(http_method_arg) self.duration", "session 'session_size': 1 * 1000 * 1000, # 'session_size': None,", "'mac': '00:bb:01:00:00:01', # IP configuration for the ByteBlower Port. 
#", "sessions http_client.DurationSet(self.duration) # - how many connections can be created?", "http_client.StatusGet() request_status_string = byteblower.ConvertHTTPMultiClientStatusToString(request_status_value) http_client_result.Refresh() tx_bytes = http_client_result.TcpTxByteCountGet() tx_speed =", "Client port\") # Create the port which will be the", "HTTP server http_server = self.port_1.ProtocolHttpMultiServerAdd() server_tcp_port = self.port_1_config['tcp_port'] if server_tcp_port", "at the same time http_client.MaximumConcurrentRequestsSet(self.max_concurrent_sessions) # - individual duration, can", "= byteblower.ByteBlower.InstanceGet() if self.port_1: self.server.PortDestroy(self.port_1) self.port_1 = None if self.port_2:", "the HTTP client\") http_client.Start() http_client_result = http_client.ResultGet() for iteration in", "this python module is called stand-alone, the run-function must be", "= http_client.ResultGet() for iteration in range(10): time.sleep(1) http_client_result.Refresh() print(\"-\" *", "HTTP MultiServer/MultiClient for the ByteBlower Python API. All examples are", "to the # webserver 'http_method': 'GET', # 'http_method': 'PUT', #", "'http_method': 'PUT', # total duration, in nanoseconds. # This is", "kwargs['max_concurrent_sessions'] self.max_total_sessions = kwargs['max_total_sessions'] self.tos = kwargs['tos'] self.server = None", "self.port_2.DescriptionGet()) # let the HTTP server listen for requests http_server.Start()", "= config['ip'] if not isinstance(ip_config, list): # Config is not", "http_client.Start() http_client_result = http_client.ResultGet() for iteration in range(10): time.sleep(1) http_client_result.Refresh()", "print(\" connections refused\", http_client_result.ConnectionsRefusedGet()) print(\"-\" * 10) http_client.Stop() http_server.Stop() print(\"Stopped", "# TCP port number to be used by the HTTP", "is called stand-alone, the run-function must be # called. 
This", "webserver 'http_method': 'GET', # 'http_method': 'PUT', # total duration, in", "byteblower_instance = byteblower.ByteBlower.InstanceGet() if self.port_1: self.server.PortDestroy(self.port_1) self.port_1 = None if", "number of sessions will be created # 0 means no", "if SLAAC, use \"slaac\" # 'ip': 'slaac', # if staticv4,", "use \"slaac\" # 'ip': 'slaac', # if staticv4, use [\"ipaddress\",", "Clients will use to connect with # the HTTP server", "prefixlength] # 'ip': ['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'], # TCP port number to", "== 3: # IPv4 port_l3 = port.Layer3IPv4Set() port_l3.IpSet(ip_config[0]) port_l3.NetmaskSet(ip_config[1]) port_l3.GatewaySet(ip_config[2])", "1500000000, 'session_duration': None, # session size, in bytes # The", "= ip_config[1] ip = \"{}/{}\".format(address, prefix_length) port_l3.IpManualAdd(ip) config['ip_address'] = ip_config[0]", "to include it in a series of # examples. if", "traffic will flow from the HTTP client to the HTTP", "used by the HTTP connection. # On the HTTP server,", "total duration, in nanoseconds. # This is the duration of", "= http_client_result.TcpRxByteCountGet() rx_speed = http_client_result.TcpRxSpeedGet() http_server_result = http_server.ResultGet() http_server_result.Refresh() print(\"Requested", "the HTTP server, this will be the port on which", "No more than this number of sessions will be created", "FQDN) of the ByteBlower server to use 'server_address': 'byteblower-tp-1300.lab.byteblower.excentis.com', #", "# Will be used as HTTP client. 
'port_2_config': { 'interface':", "{}\".format(request_status_string)) print(\"Client Result data : {}\".format(http_client_result.DescriptionGet())) print(\"Server Result data :", "= None self.port_2 = None def cleanup(self): \"\"\"Clean up the", "port_l2.MacSet(config['mac']) ip_config = config['ip'] if not isinstance(ip_config, list): # Config", "if ip_config.lower() == \"dhcpv4\": port_l3 = port.Layer3IPv4Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] =", "tx_bytes, rx_bytes, tx_speed, rx_speed, request_status_value ] def provision_port(self, config): port", "netmask, gateway] # 'ip': ['192.168.0.2', \"255.255.255.0\", \"192.168.0.1\"], # if staticv6,", "(port_1) print(\"Creating HTTP Server port\") self.port_1 = self.provision_port(self.port_1_config) print(\"Creating HTTP", "def provision_port(self, config): port = self.server.PortCreate(config['interface']) port_l2 = port.Layer2EthIISet() port_l2.MacSet(config['mac'])", "== \"slaac\": port_l3 = port.Layer3IPv6Set() port_l3.StatelessAutoconfiguration() config['ip_address'] = port_l3.IpStatelessGet() else:", "be # called. This approach makes it possible to include", "# ip': 'dhcpv6', # if SLAAC, use \"slaac\" # 'ip':", "if self.port_2: self.server.PortDestroy(self.port_2) self.port_2 = None if self.server is not", "port.Layer3IPv6Set() port_l3.StatelessAutoconfiguration() config['ip_address'] = port_l3.IpStatelessGet() else: # Static configuration if", "direction. # If the HTTP Method is GET, # traffic", "if staticv4, use [\"ipaddress\", netmask, gateway] # 'ip': ['192.168.0.2', \"255.255.255.0\",", "raise ValueError(\"Either duration or request_size must be configured\") print(\"Starting the", "the same time http_client.MaximumConcurrentRequestsSet(self.max_concurrent_sessions) # - individual duration, can be", "a specific size... http_client.SessionSizeSet(self.session_size) else: raise ValueError(\"Either duration or request_size", "ByteBlower port. # Will be used as HTTP server. 
'port_1_config':", "create a HTTP Client http_client = self.port_2.ProtocolHttpMultiClientAdd() # - remote", "use \"dhcpv6\" # ip': 'dhcpv6', # if SLAAC, use \"slaac\"", "first ByteBlower port. # Will be used as HTTP server.", "self.server_address = kwargs['server_address'] self.port_1_config = kwargs['port_1_config'] self.port_2_config = kwargs['port_2_config'] #", "total duration of all sessions http_client.DurationSet(self.duration) # - how many", "attempted\", http_client_result.ConnectionsAttemptedGet()) print(\" connections established\", http_client_result.ConnectionsEstablishedGet()) print(\" connections aborted\", http_client_result.ConnectionsAbortedGet())", "port which will be the HTTP client (port_2) self.port_2 =", "**kwargs): self.server_address = kwargs['server_address'] self.port_1_config = kwargs['port_1_config'] self.port_2_config = kwargs['port_2_config']", "HTTP client # If the HTTP Method is PUT, #", "size-based or time-based if self.session_duration is not None: # let", "self.port_2 = None if self.server is not None: byteblower_instance.ServerRemove(self.server) self.server", "to use 'server_address': 'byteblower-tp-1300.lab.byteblower.excentis.com', # Configuration for the first ByteBlower", "be used by the HTTP connection. 
# On the HTTP", "kwargs['duration'] self.session_duration = kwargs['session_duration'] self.session_size = kwargs['session_size'] self.max_concurrent_sessions = kwargs['max_concurrent_sessions']", "port:\", self.port_2.DescriptionGet()) # let the HTTP server listen for requests", "refused\", http_client_result.ConnectionsRefusedGet()) print(\"-\" * 10) http_client.Stop() http_server.Stop() print(\"Stopped the HTTP", "ParseHTTPRequestMethodFromString http_method_arg = kwargs['http_method'] self.http_method = ParseHTTPRequestMethodFromString(http_method_arg) self.duration = kwargs['duration']", "self.port_2 = self.provision_port(self.port_2_config) http_server_ip_address = self.port_1_config['ip_address'] # create a HTTP", "= http_server.PortGet() # create a HTTP Client http_client = self.port_2.ProtocolHttpMultiClientAdd()", "must be # called. This approach makes it possible to", "'session_size': None, # max concurrent sessions # Maximum number of", "None self.port_2 = None def cleanup(self): \"\"\"Clean up the created", "the HTTP connection. # On the HTTP server, this will", "rx_speed = http_client_result.TcpRxSpeedGet() http_server_result = http_server.ResultGet() http_server_result.Refresh() print(\"Requested Duration :", "http_server.Stop() print(\"Stopped the HTTP client\") request_status_value = http_client.StatusGet() request_status_string =", "to ByteBlower server %s...\" % self.server_address) self.server = byteblower_instance.ServerAdd(self.server_address) #", "sessions # No more than this number of sessions will", "TCP port number to be used by the HTTP connection.", "'SLAAC', 'static' # if DHCPv4, use \"dhcpv4\" 'ip': 'dhcpv4', #", "listens. 
'tcp_port': 4096 }, # Configuration for the second ByteBlower", "{} nanoseconds\".format(self.duration)) print(\"Status : {}\".format(request_status_string)) print(\"Client Result data : {}\".format(http_client_result.DescriptionGet()))", "the HTTP server 'tcp_port_min': 32000, 'tcp_port_max': 50000 }, # HTTP", "None: http_server.PortSet(server_tcp_port) else: server_tcp_port = http_server.PortGet() # create a HTTP", "to download... http_client.SessionDurationSet(self.session_duration) elif self.session_size is not None: # let", "# Create the port which will be the HTTP client", "or request_size must be configured\") print(\"Starting the HTTP client\") http_client.Start()", "port:\", self.port_1.DescriptionGet()) print(\"Client port:\", self.port_2.DescriptionGet()) # let the HTTP server", "called stand-alone, the run-function must be # called. This approach", "use [\"ipaddress\", netmask, gateway] # 'ip': ['192.168.0.2', \"255.255.255.0\", \"192.168.0.1\"], #", "Needed for python2 / python3 print function compatibility from __future__", "server # - PUT: Standard HTTP upload, the wireless endpoint", "self.port_1 = None self.port_2 = None def cleanup(self): \"\"\"Clean up", "- PUT: Standard HTTP upload, the wireless endpoint will push", "Method is GET, # traffic will flow from the HTTP", "if not isinstance(ip_config, list): # Config is not static, DHCP", "'GET', # 'http_method': 'PUT', # total duration, in nanoseconds. 
#", "nanoseconds # Duration of the individual sessions # 'session_duration': 1500000000,", "up the created objects\"\"\" byteblower_instance = byteblower.ByteBlower.InstanceGet() if self.port_1: self.server.PortDestroy(self.port_1)", "# max concurrent sessions # Maximum number of sessions that", "to the # enumeration used by the API from byteblowerll.byteblower", "kwargs['http_method'] self.http_method = ParseHTTPRequestMethodFromString(http_method_arg) self.duration = kwargs['duration'] self.session_duration = kwargs['session_duration']", "print(\" connections attempted\", http_client_result.ConnectionsAttemptedGet()) print(\" connections established\", http_client_result.ConnectionsEstablishedGet()) print(\" connections", "to use on the HTTP client (and server) 'tos': 0", "port.Layer2EthIISet() port_l2.MacSet(config['mac']) ip_config = config['ip'] if not isinstance(ip_config, list): #", "ip': 'dhcpv6', # if SLAAC, use \"slaac\" # 'ip': 'slaac',", "\"dhcpv6\" # 'ip': 'dhcpv6', # if SLAAC, use \"slaac\" #", "the second ByteBlower port. # Will be used as HTTP", "rx_speed, request_status_value ] def provision_port(self, config): port = self.server.PortCreate(config['interface']) port_l2", "IPv4 port_l3 = port.Layer3IPv4Set() port_l3.IpSet(ip_config[0]) port_l3.NetmaskSet(ip_config[1]) port_l3.GatewaySet(ip_config[2]) config['ip_address'] = port_l3.IpGet()", "(IP or FQDN) of the ByteBlower server to use 'server_address':", "function, we can use this to parse the HTTP Method", "print(\"Client port:\", self.port_2.DescriptionGet()) # let the HTTP server listen for", "approach makes it possible to include it in a series", "must be configured\") print(\"Starting the HTTP client\") http_client.Start() http_client_result =", "= port.Layer3IPv6Set() port_l3.StatelessAutoconfiguration() config['ip_address'] = port_l3.IpStatelessGet() else: # Static configuration", "a session 'session_size': 1 * 1000 * 1000, # 'session_size':", "Client request a page of a specific size... 
http_client.SessionSizeSet(self.session_size) else:", "ByteBlower Port. # Options are 'DHCPv4', 'DHCPv6', 'SLAAC', 'static' #", "the flow. When this duration expires, # all sessions will", "API. All examples are guaranteed to work with Python 2.7", "expires, # all sessions will be stopped. 'duration': 10000000000, #", "connect with # the HTTP server 'tcp_port_min': 32000, 'tcp_port_max': 50000", "in nanoseconds. # This is the duration of the flow.", "# called. This approach makes it possible to include it", "port_l3.IpManualAdd(ip) config['ip_address'] = ip_config[0] if not isinstance(config['ip_address'], str): ip =", "isinstance(config['ip_address'], str): ip = config['ip_address'][0] if '/' in ip: config['ip_address']", "for the second ByteBlower port. # Will be used as", "can be created? http_client.CumulativeConnectionLimitSet(self.max_total_sessions) # - how many connections can", "nanoseconds\".format(self.duration)) print(\"Status : {}\".format(request_status_string)) print(\"Client Result data : {}\".format(http_client_result.DescriptionGet())) print(\"Server", "port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpGet() elif ip_config.lower() == \"dhcpv6\": port_l3 =", "that will be running simultaneously 'max_concurrent_sessions': 100, # maximum number", "'dhcpv4', # if DHCPv6, use \"dhcpv6\" # 'ip': 'dhcpv6', #", "= \"{}/{}\".format(address, prefix_length) port_l3.IpManualAdd(ip) config['ip_address'] = ip_config[0] if not isinstance(config['ip_address'],", "self.session_duration is not None: # let the HTTP Client request", "if DHCPv6, use \"dhcpv6\" # ip': 'dhcpv6', # if SLAAC,", "'tcp_port': 4096 }, # Configuration for the second ByteBlower port.", "the ByteBlower server to use 'server_address': 'byteblower-tp-1300.lab.byteblower.excentis.com', # Configuration for", "Python 2.7 and above Copyright 2018, Ex<NAME>. \"\"\" # Needed", "Copyright 2018, Ex<NAME>. 
\"\"\" # Needed for python2 / python3", "port_l3 = port.Layer3IPv6Set() port_l3.StatelessAutoconfiguration() config['ip_address'] = port_l3.IpStatelessGet() else: # Static", "port_l3.StatelessAutoconfiguration() config['ip_address'] = port_l3.IpStatelessGet() else: # Static configuration if len(ip_config)", "= port_l3.IpGet() elif len(ip_config) == 2: port_l3 = port.Layer3IPv6Set() #", "'ip': 'dhcpv4', # if DHCPv6, use \"dhcpv6\" # ip': 'dhcpv6',", "elif ip_config.lower() == \"dhcpv6\": port_l3 = port.Layer3IPv6Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] =", "series of # examples. if __name__ == \"__main__\": example =", "client\") http_client.Start() http_client_result = http_client.ResultGet() for iteration in range(10): time.sleep(1)", "module import byteblowerll.byteblower as byteblower import time configuration = {", "HTTP server to the HTTP client # If the HTTP", "config['ip_address'] = port_l3.IpDhcpGet() elif ip_config.lower() == \"slaac\": port_l3 = port.Layer3IPv6Set()", "http_client.DurationSet(self.duration) # - how many connections can be created? http_client.CumulativeConnectionLimitSet(self.max_total_sessions)", "the ByteBlower Port. # Options are 'DHCPv4', 'DHCPv6', 'SLAAC', 'static'", "'byteblower-tp-1300.lab.byteblower.excentis.com', # Configuration for the first ByteBlower port. 
# Will", "{ 'interface': 'trunk-1-13', 'mac': '00:bb:01:00:00:01', # IP configuration for the", "to work with Python 2.7 and above Copyright 2018, Ex<NAME>.", "# maximum number of sessions # No more than this", "self.port_1 = self.provision_port(self.port_1_config) print(\"Creating HTTP Client port\") # Create the", "python2 / python3 print function compatibility from __future__ import print_function", "port.DescriptionGet()) return port # When this python module is called", "# 0 means no limit 'max_total_sessions': 0, # TOS value", "= ip.split('/')[0] print(\"Created port\", port.DescriptionGet()) return port # When this", "module is called stand-alone, the run-function must be # called.", "connections can be running at the same time http_client.MaximumConcurrentRequestsSet(self.max_concurrent_sessions) #", "ByteBlower server to use 'server_address': 'byteblower-tp-1300.lab.byteblower.excentis.com', # Configuration for the", "Method to the # enumeration used by the API from", "kwargs['session_duration'] self.session_size = kwargs['session_size'] self.max_concurrent_sessions = kwargs['max_concurrent_sessions'] self.max_total_sessions = kwargs['max_total_sessions']", "# if staticv6, use [\"ipaddress\", prefixlength] # 'ip': ['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'],", "Standard HTTP upload, the wireless endpoint will push data to", "# let the HTTP Client request a page of a", "self.port_1 = None if self.port_2: self.server.PortDestroy(self.port_2) self.port_2 = None if", "config['ip_address'] = port_l3.IpGet() elif ip_config.lower() == \"dhcpv6\": port_l3 = port.Layer3IPv6Set()", "__name__ == \"__main__\": example = Example(**configuration) try: example.run() finally: example.cleanup()", "is not None: byteblower_instance.ServerRemove(self.server) self.server = None def run(self): byteblower_instance", "= None def cleanup(self): \"\"\"Clean up the created objects\"\"\" byteblower_instance", "'session_size': 1 * 1000 * 1000, # 'session_size': None, #", 
"Duration : {} nanoseconds\".format(self.duration)) print(\"Status : {}\".format(request_status_string)) print(\"Client Result data", "byteblower_instance.ServerAdd(self.server_address) # Create the port which will be the HTTP", "None self.port_1 = None self.port_2 = None def cleanup(self): \"\"\"Clean", "= None if self.server is not None: byteblower_instance.ServerRemove(self.server) self.server =", "http_client_result.ConnectionsAbortedGet()) print(\" connections refused\", http_client_result.ConnectionsRefusedGet()) print(\"-\" * 10) http_client.Stop() http_server.Stop()", "for python2 / python3 print function compatibility from __future__ import", "page of a specific duration # to download... http_client.SessionDurationSet(self.session_duration) elif", "- individual duration, can be size-based or time-based if self.session_duration", "port.Layer3IPv4Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpGet() elif ip_config.lower() == \"dhcpv6\": port_l3", "0 } class Example: def __init__(self, **kwargs): self.server_address = kwargs['server_address']", "# - total duration of all sessions http_client.DurationSet(self.duration) # -", "Client request a page of a specific duration # to", "HTTP upload, the wireless endpoint will push data to the", "# - how many connections can be created? 
http_client.CumulativeConnectionLimitSet(self.max_total_sessions) #", "from the HTTP server to the HTTP client # If", "is PUT, # traffic will flow from the HTTP client", "the HTTP Method is PUT, # traffic will flow from", "kwargs['max_total_sessions'] self.tos = kwargs['tos'] self.server = None self.port_1 = None", "\"slaac\": port_l3 = port.Layer3IPv6Set() port_l3.StatelessAutoconfiguration() config['ip_address'] = port_l3.IpStatelessGet() else: #", "will be the HTTP client (port_2) self.port_2 = self.provision_port(self.port_2_config) http_server_ip_address", "__init__(self, **kwargs): self.server_address = kwargs['server_address'] self.port_1_config = kwargs['port_1_config'] self.port_2_config =", "self.server = None self.port_1 = None self.port_2 = None def", "port_l3.IpSet(ip_config[0]) port_l3.NetmaskSet(ip_config[1]) port_l3.GatewaySet(ip_config[2]) config['ip_address'] = port_l3.IpGet() elif len(ip_config) == 2:", "be GET or PUT # - GET: Standard HTTP download,", "duration of the flow. When this duration expires, # all", "is not static, DHCP or slaac if ip_config.lower() == \"dhcpv4\":", "port.Layer3IPv6Set() # IPv6 address = ip_config[0] prefix_length = ip_config[1] ip", "are 'DHCPv4', 'DHCPv6', 'SLAAC', 'static' # if DHCPv4, use \"dhcpv4\"", "number of bytes transmitted by a session 'session_size': 1 *", "will be running simultaneously 'max_concurrent_sessions': 100, # maximum number of", "the HTTP client # If the HTTP Method is PUT,", "TOS value to use on the HTTP client (and server)", "the wireless endpoint will push data to the # webserver", "work with Python 2.7 and above Copyright 2018, Ex<NAME>. 
\"\"\"", "tx_bytes = http_client_result.TcpTxByteCountGet() tx_speed = http_client_result.TcpTxSpeedGet() rx_bytes = http_client_result.TcpRxByteCountGet() rx_speed", "use \"dhcpv6\" # 'ip': 'dhcpv6', # if SLAAC, use \"slaac\"", "created objects\"\"\" byteblower_instance = byteblower.ByteBlower.InstanceGet() if self.port_1: self.server.PortDestroy(self.port_1) self.port_1 =", "print(\"Requested Duration : {} nanoseconds\".format(self.duration)) print(\"Status : {}\".format(request_status_string)) print(\"Client Result", "python module is called stand-alone, the run-function must be #", "'ip': 'dhcpv4', # if DHCPv6, use \"dhcpv6\" # 'ip': 'dhcpv6',", "self.port_1: self.server.PortDestroy(self.port_1) self.port_1 = None if self.port_2: self.server.PortDestroy(self.port_2) self.port_2 =", "rx_bytes = http_client_result.TcpRxByteCountGet() rx_speed = http_client_result.TcpRxSpeedGet() http_server_result = http_server.ResultGet() http_server_result.Refresh()", "= self.port_1.ProtocolHttpMultiServerAdd() server_tcp_port = self.port_1_config['tcp_port'] if server_tcp_port is not None:", "ByteBlower module import byteblowerll.byteblower as byteblower import time configuration =", "all sessions http_client.DurationSet(self.duration) # - how many connections can be", "# all sessions will be stopped. 
'duration': 10000000000, # session", "duration, in nanoseconds # Duration of the individual sessions #", "session size, in bytes # The number of bytes transmitted", "HTTP Client request a page of a specific duration #", "print(\"Status : {}\".format(request_status_string)) print(\"Client Result data : {}\".format(http_client_result.DescriptionGet())) print(\"Server Result", "= self.port_1_config['tcp_port'] if server_tcp_port is not None: http_server.PortSet(server_tcp_port) else: server_tcp_port", "DHCP or slaac if ip_config.lower() == \"dhcpv4\": port_l3 = port.Layer3IPv4Set()", "self.port_1.DescriptionGet()) print(\"Client port:\", self.port_2.DescriptionGet()) # let the HTTP server listen", "= kwargs['max_total_sessions'] self.tos = kwargs['tos'] self.server = None self.port_1 =", "port_l3 = port.Layer3IPv4Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpGet() elif ip_config.lower() ==", "individual duration, can be size-based or time-based if self.session_duration is", "= self.provision_port(self.port_1_config) print(\"Creating HTTP Client port\") # Create the port", "ValueError(\"Either duration or request_size must be configured\") print(\"Starting the HTTP", "use to connect with # the HTTP server 'tcp_port_min': 32000,", "self.max_concurrent_sessions, tx_bytes, rx_bytes, tx_speed, rx_speed, request_status_value ] def provision_port(self, config):", "Create the port which will be the HTTP client (port_2)", "use this to parse the HTTP Method to the #", "}, # Configuration for the second ByteBlower port. 
# Will", "be size-based or time-based if self.session_duration is not None: #", "== \"dhcpv4\": port_l3 = port.Layer3IPv4Set() port_l3.ProtocolDhcpGet().Perform() config['ip_address'] = port_l3.IpGet() elif", "config['ip_address'] = port_l3.IpGet() elif len(ip_config) == 2: port_l3 = port.Layer3IPv6Set()", "'dhcpv6', # if SLAAC, use \"slaac\" # 'ip': 'slaac', #", "is not None: # let the HTTP Client request a", ": {}\".format(http_client_result.DescriptionGet())) print(\"Server Result data : {}\".format(http_server_result.DescriptionGet())) return [ self.duration,", "by a session 'session_size': 1 * 1000 * 1000, #", "request_status_value ] def provision_port(self, config): port = self.server.PortCreate(config['interface']) port_l2 =", "# Config is not static, DHCP or slaac if ip_config.lower()", "str): ip = config['ip_address'][0] if '/' in ip: config['ip_address'] =", "if '/' in ip: config['ip_address'] = ip.split('/')[0] print(\"Created port\", port.DescriptionGet())", "server 'tcp_port_min': 32000, 'tcp_port_max': 50000 }, # HTTP Method #", "static, DHCP or slaac if ip_config.lower() == \"dhcpv4\": port_l3 =", "the port on which the server # listens. 
'tcp_port': 4096", "requests http_server.Start() # - total duration of all sessions http_client.DurationSet(self.duration)", "PUT: Standard HTTP upload, the wireless endpoint will push data", "be running at the same time http_client.MaximumConcurrentRequestsSet(self.max_concurrent_sessions) # - individual", "class Example: def __init__(self, **kwargs): self.server_address = kwargs['server_address'] self.port_1_config =", "50000 }, # HTTP Method # HTTP Method can be", "'dhcpv4', # if DHCPv6, use \"dhcpv6\" # ip': 'dhcpv6', #", "(port_2) self.port_2 = self.provision_port(self.port_2_config) http_server_ip_address = self.port_1_config['ip_address'] # create a", "Create the port which will be the HTTP server (port_1)", "None def run(self): byteblower_instance = byteblower.ByteBlower.InstanceGet() print(\"Connecting to ByteBlower server", "be created? http_client.CumulativeConnectionLimitSet(self.max_total_sessions) # - how many connections can be", "time.sleep(1) http_client_result.Refresh() print(\"-\" * 10) print(\"Iteration\", iteration+1) print(\" connections attempted\",", "data : {}\".format(http_server_result.DescriptionGet())) return [ self.duration, self.session_duration, self.session_size, self.max_total_sessions, self.max_concurrent_sessions,", "be used as HTTP client. 
'port_2_config': { 'interface': 'trunk-1-25', 'mac':", "byteblower.ConvertHTTPMultiClientStatusToString(request_status_value) http_client_result.Refresh() tx_bytes = http_client_result.TcpTxByteCountGet() tx_speed = http_client_result.TcpTxSpeedGet() rx_bytes =", "http_client_result.Refresh() print(\"-\" * 10) print(\"Iteration\", iteration+1) print(\" connections attempted\", http_client_result.ConnectionsAttemptedGet())", "# let the HTTP server listen for requests http_server.Start() #", "time http_client.MaximumConcurrentRequestsSet(self.max_concurrent_sessions) # - individual duration, can be size-based or", "{}\".format(http_server_result.DescriptionGet())) return [ self.duration, self.session_duration, self.session_size, self.max_total_sessions, self.max_concurrent_sessions, tx_bytes, rx_bytes,", "= kwargs['tos'] self.server = None self.port_1 = None self.port_2 =", "http_client.RemotePortSet(server_tcp_port) # - local endpoint http_client.LocalPortRangeSet(self.port_2_config['tcp_port_min'], self.port_2_config['tcp_port_max']) # Configure the", "duration of all sessions http_client.DurationSet(self.duration) # - how many connections", "# - remote endpoint http_client.RemoteAddressSet(http_server_ip_address) http_client.RemotePortSet(server_tcp_port) # - local endpoint", "aborted\", http_client_result.ConnectionsAbortedGet()) print(\" connections refused\", http_client_result.ConnectionsRefusedGet()) print(\"-\" * 10) http_client.Stop()", "use [\"ipaddress\", prefixlength] # 'ip': ['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'], # TCP port", "1 * 1000 * 1000, # 'session_size': None, # max", "port on which the server # listens. 
'tcp_port': 4096 },", "http_client_result.TcpRxByteCountGet() rx_speed = http_client_result.TcpRxSpeedGet() http_server_result = http_server.ResultGet() http_server_result.Refresh() print(\"Requested Duration", "elif len(ip_config) == 2: port_l3 = port.Layer3IPv6Set() # IPv6 address", "http_client.LocalPortRangeSet(self.port_2_config['tcp_port_min'], self.port_2_config['tcp_port_max']) # Configure the direction. # If the HTTP", "[\"ipaddress\", prefixlength] # 'ip': ['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'], # TCP port number", "will flow from the HTTP server to the HTTP client", "the run-function must be # called. This approach makes it", "port\", port.DescriptionGet()) return port # When this python module is", "port_l3.IpGet() elif len(ip_config) == 2: port_l3 = port.Layer3IPv6Set() # IPv6", "= port_l3.IpGet() elif ip_config.lower() == \"dhcpv6\": port_l3 = port.Layer3IPv6Set() port_l3.ProtocolDhcpGet().Perform()", "not None: byteblower_instance.ServerRemove(self.server) self.server = None def run(self): byteblower_instance =", "number of sessions # No more than this number of", "the HTTP server (port_1) print(\"Creating HTTP Server port\") self.port_1 =", "port_l3 = port.Layer3IPv4Set() port_l3.IpSet(ip_config[0]) port_l3.NetmaskSet(ip_config[1]) port_l3.GatewaySet(ip_config[2]) config['ip_address'] = port_l3.IpGet() elif", "= port.Layer2EthIISet() port_l2.MacSet(config['mac']) ip_config = config['ip'] if not isinstance(ip_config, list):", "config['ip_address'] = ip.split('/')[0] print(\"Created port\", port.DescriptionGet()) return port # When", "not static, DHCP or slaac if ip_config.lower() == \"dhcpv4\": port_l3", "request a page of a specific duration # to download...", "or FQDN) of the ByteBlower server to use 'server_address': 'byteblower-tp-1300.lab.byteblower.excentis.com',", "is the duration of the flow. 
When this duration expires,", "number of sessions that will be running simultaneously 'max_concurrent_sessions': 100,", "port number to be used by the HTTP connection. #", "= { # Address (IP or FQDN) of the ByteBlower", "the HTTP server listen for requests http_server.Start() # - total", "port_l3.IpDhcpGet() elif ip_config.lower() == \"slaac\": port_l3 = port.Layer3IPv6Set() port_l3.StatelessAutoconfiguration() config['ip_address']", "run(self): byteblower_instance = byteblower.ByteBlower.InstanceGet() print(\"Connecting to ByteBlower server %s...\" %", "'tcp_port_max': 50000 }, # HTTP Method # HTTP Method can", "not None: http_server.PortSet(server_tcp_port) else: server_tcp_port = http_server.PortGet() # create a", "will push data to the # webserver 'http_method': 'GET', #", "Example: def __init__(self, **kwargs): self.server_address = kwargs['server_address'] self.port_1_config = kwargs['port_1_config']", "we can use this to parse the HTTP Method to", "http_server.PortGet() # create a HTTP Client http_client = self.port_2.ProtocolHttpMultiClientAdd() #", "client # If the HTTP Method is PUT, # traffic", "remote endpoint http_client.RemoteAddressSet(http_server_ip_address) http_client.RemotePortSet(server_tcp_port) # - local endpoint http_client.LocalPortRangeSet(self.port_2_config['tcp_port_min'], self.port_2_config['tcp_port_max'])", "ip_config.lower() == \"slaac\": port_l3 = port.Layer3IPv6Set() port_l3.StatelessAutoconfiguration() config['ip_address'] = port_l3.IpStatelessGet()", "None if self.port_2: self.server.PortDestroy(self.port_2) self.port_2 = None if self.server is", "server_tcp_port = http_server.PortGet() # create a HTTP Client http_client =", "'tcp_port_min': 32000, 'tcp_port_max': 50000 }, # HTTP Method # HTTP", "# - individual duration, can be size-based or time-based if", "# enumeration used by the API from byteblowerll.byteblower import ParseHTTPRequestMethodFromString", "byteblower.ByteBlower.InstanceGet() if self.port_1: 
self.server.PortDestroy(self.port_1) self.port_1 = None if self.port_2: self.server.PortDestroy(self.port_2)", "the first ByteBlower port. # Will be used as HTTP", "self.port_1_config['tcp_port'] if server_tcp_port is not None: http_server.PortSet(server_tcp_port) else: server_tcp_port =", "['fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b', '64'], # TCP port range the HTTP Clients will", "GET or PUT # - GET: Standard HTTP download, we", "push data to the # webserver 'http_method': 'GET', # 'http_method':", "# If the HTTP Method is PUT, # traffic will", "elif ip_config.lower() == \"slaac\": port_l3 = port.Layer3IPv6Set() port_l3.StatelessAutoconfiguration() config['ip_address'] =", "}, # HTTP Method # HTTP Method can be GET", "'/' in ip: config['ip_address'] = ip.split('/')[0] print(\"Created port\", port.DescriptionGet()) return", "which will be the HTTP client (port_2) self.port_2 = self.provision_port(self.port_2_config)", "server to the HTTP client # If the HTTP Method", "not None: # let the HTTP Client request a page", "function compatibility from __future__ import print_function # import the ByteBlower", "let the HTTP Client request a page of a specific", "run-function must be # called. This approach makes it possible", "of sessions will be created # 0 means no limit", "# Will be used as HTTP server. 'port_1_config': { 'interface':", "tx_speed = http_client_result.TcpTxSpeedGet() rx_bytes = http_client_result.TcpRxByteCountGet() rx_speed = http_client_result.TcpRxSpeedGet() http_server_result", "of a specific duration # to download... 
http_client.SessionDurationSet(self.session_duration) elif self.session_size", "config['ip_address'] = ip_config[0] if not isinstance(config['ip_address'], str): ip = config['ip_address'][0]", "HTTP Method can be GET or PUT # - GET:", "http_client.MaximumConcurrentRequestsSet(self.max_concurrent_sessions) # - individual duration, can be size-based or time-based", "retrieve data from the web server # - PUT: Standard", "Maximum number of sessions that will be running simultaneously 'max_concurrent_sessions':", "- GET: Standard HTTP download, we retrieve data from the" ]
[ "the PoD XML file from http://dev.24-timmars.nu/PoD/xmlapi_app.php. import xml.etree.ElementTree as etree", "1 for (name, obj) in features: fd.write(u'\"%s\": {\"type\": \"FeatureCollection\",' '\"crs\":", "None: properties[\"footnote\"] = footnote coordinates = [float(lng), float(lat)] geometry =", "to a GeoJSON file. # # With the --javascript parameter,", "p.find(\"name\").text descr = p.find(\"descr\").text lat = p.find(\"lat\").text lng = p.find(\"long\").text", "number 9000 and above are not real points; they are", "= [float(lng), float(lat)] geometry = {\"type\": \"Point\", \"coordinates\": coordinates} point", "# Get the PoD XML file from http://dev.24-timmars.nu/PoD/xmlapi_app.php. import xml.etree.ElementTree", "# With the --javascript parameter, the generated file is a", "fd = codecs.open(fname, \"w\", encoding=\"utf-8\") else: fd = io.open(fname, \"w\",", "{\"type\": \"LineString\", \"coordinates\": [src_coords, dst_coords]} leg = {\"type\": \"Feature\", \"properties\":", "coast = [] offshore = [] for p in doc.findall(\"legs/leg\"):", "error: no distance: src: %s dst: %s\" % (src, dst))", "doc.findall(\"points/point\"): number = p.find(\"number\").text if int(number) > MAXPOINT: continue name", "[] turning_points = [] for n in doc.findall(\"kretsar/krets/startpoints/number\"): startnumbers[n.text] =", "\"geometry\": geometry}, if sea == \"0\": coast.extend(leg) else: offshore.extend(leg) return", "= argparse.ArgumentParser() parser.add_argument(\"-i\", \"--infile\", help=\"input file\") parser.add_argument(\"-o\", \"--outfile\", help=\"output file\")", "flen = len(features) i = 1 for (name, obj) in", "{} all_points = {} start_points = [] turning_points = []", "*/\\n') fd.write(u'export var basePodSpec = ') fd.write(u'{\"id\": %s, ' %", "name) fd.write(json.dumps(obj, ensure_ascii=False)) if i == flen: fd.write(u'}') else: i", "or int(dst) > MAXPOINT: continue if int(src) < int(dst): #", "footnote = footnoteelem.text properties = {\"number\": number, 
\"name\": name, \"descr\":", "import argparse import re import json import io import sys", "points; they are used to mark # area borders MAXPOINT=8999", "the generated file is a javascript # file defining a", "for p in doc.findall(\"points/point\"): number = p.find(\"number\").text if int(number) >", "XML file from http://dev.24-timmars.nu/PoD/xmlapi_app.php. import xml.etree.ElementTree as etree import argparse", "= p.find(\"addtime\").text if dist is None: print(\"** error: no distance:", "sys import os.path import datetime if sys.version < '3': import", "= {\"type\": \"Point\", \"coordinates\": coordinates} point = {\"type\": \"Feature\", \"properties\":", "coordinates = [float(lng), float(lat)] geometry = {\"type\": \"Point\", \"coordinates\": coordinates}", "in doc.findall(\"points/point\"): number = p.find(\"number\").text if int(number) > MAXPOINT: continue", "fd.write(u'}') else: i = i + 1 fd.write(u'},\\n') if javascript:", "of terrain\") parser.add_argument(\"--javascript\", action=\"store_true\") args = parser.parse_args() tree = etree.parse(args.infile)", "parameter, the generated file is a javascript # file defining", "parser.add_argument(\"--id\", help=\"id of terrain\") parser.add_argument(\"--javascript\", action=\"store_true\") args = parser.parse_args() tree", "(in both directions), # skip one direction continue dist =", "not real points; they are used to mark # area", "> MAXPOINT: continue if int(src) < int(dst): # since all", "get_legs(tree, all_points): doc = tree.getroot() coast = [] offshore =", "(src, dst)) properties = {\"src\": src, \"dst\": dst, \"dist\": float(dist)}", "sys.version < '3': import codecs # points number 9000 and", "import codecs # points number 9000 and above are not", "distance: src: %s dst: %s\" % (src, dst)) properties =", "\"Feature\", \"properties\": properties, \"geometry\": geometry}, if sea == \"0\": coast.extend(leg)", "continue dist = p.find(\"dist\").text sea = p.find(\"sea\").text addtime = 
p.find(\"addtime\").text", "start_points = [] turning_points = [] for n in doc.findall(\"kretsar/krets/startpoints/number\"):", "'\"crs\": { \"type\": \"name\",' '\"properties\": { \"name\": \"urn:ogc:def:crs:OGC:1.3:CRS84\" } },'", "return all_points, start_points, turning_points def get_legs(tree, all_points): doc = tree.getroot()", "eslint-disable */\\n') fd.write(u'export var basePodSpec = ') fd.write(u'{\"id\": %s, '", "parser.add_argument(\"-i\", \"--infile\", help=\"input file\") parser.add_argument(\"-o\", \"--outfile\", help=\"output file\") parser.add_argument(\"--id\", help=\"id", "int(number) > MAXPOINT: continue name = p.find(\"name\").text descr = p.find(\"descr\").text", "fd.write(u'{\"id\": %s, ' % id) flen = len(features) i =", "True for p in doc.findall(\"points/point\"): number = p.find(\"number\").text if int(number)", "direction continue dist = p.find(\"dist\").text sea = p.find(\"sea\").text addtime =", "parser.add_argument(\"-o\", \"--outfile\", help=\"output file\") parser.add_argument(\"--id\", help=\"id of terrain\") parser.add_argument(\"--javascript\", action=\"store_true\")", "\"0\": coast.extend(leg) else: offshore.extend(leg) return coast, offshore if __name__ ==", "action=\"store_true\") args = parser.parse_args() tree = etree.parse(args.infile) all_points, start_points, turning_points", "all legs are present twice (in both directions), # skip", "print(\"** error: no distance: src: %s dst: %s\" % (src,", "mark # area borders MAXPOINT=8999 def run(): parser = argparse.ArgumentParser()", "if javascript: fd.write(u'/* eslint-disable */\\n') fd.write(u'export var basePodSpec = ')", "get_legs(tree, all_points) output_pod(args.outfile, args.javascript, args.id, [('startPoints', start_points), ('turningPoints', turning_points), ('inshoreLegs',", "{ \"name\": \"urn:ogc:def:crs:OGC:1.3:CRS84\" } },' '\"features\":' % name) fd.write(json.dumps(obj, ensure_ascii=False))", "start_points, turning_points def get_legs(tree, all_points): doc = 
tree.getroot() coast =", "turning_points.extend(point) all_points[number] = coordinates return all_points, start_points, turning_points def get_legs(tree,", "parser.parse_args() tree = etree.parse(args.infile) all_points, start_points, turning_points = get_points(tree) inshore_legs,", "\"properties\": properties, \"geometry\": geometry}, if sea == \"0\": coast.extend(leg) else:", "\"w\", encoding=\"utf-8\") else: fd = io.open(fname, \"w\", encoding=\"utf-8\") if javascript:", "{\"type\": \"Feature\", \"properties\": properties, \"geometry\": geometry}, if sea == \"0\":", "= ') fd.write(u'{\"id\": %s, ' % id) flen = len(features)", "fd.write(u'},\\n') if javascript: fd.write(u'};\\n') else: fd.write(u'}\\n') def get_points(tree): doc =", "for n in doc.findall(\"kretsar/krets/startpoints/number\"): startnumbers[n.text] = True for p in", "PoD XML file to a GeoJSON file. # # With", "('inshoreLegs', inshore_legs), ('offshoreLegs', offshore_legs)]) def output_pod(fname, javascript, id, features): if", "= etree.parse(args.infile) all_points, start_points, turning_points = get_points(tree) inshore_legs, offshore_legs =", "if javascript: fd.write(u'};\\n') else: fd.write(u'}\\n') def get_points(tree): doc = tree.getroot()", "fd.write(u'export var basePodSpec = ') fd.write(u'{\"id\": %s, ' % id)", "\"--infile\", help=\"input file\") parser.add_argument(\"-o\", \"--outfile\", help=\"output file\") parser.add_argument(\"--id\", help=\"id of", "if number in startnumbers: start_points.extend(point) else: turning_points.extend(point) all_points[number] = coordinates", "they are used to mark # area borders MAXPOINT=8999 def", "in startnumbers: start_points.extend(point) else: turning_points.extend(point) all_points[number] = coordinates return all_points,", "leg = {\"type\": \"Feature\", \"properties\": properties, \"geometry\": geometry}, if sea", "turning_points = get_points(tree) inshore_legs, offshore_legs = get_legs(tree, all_points) output_pod(args.outfile, 
args.javascript,", "all_points, start_points, turning_points def get_legs(tree, all_points): doc = tree.getroot() coast", "encoding=\"utf-8\") if javascript: fd.write(u'/* eslint-disable */\\n') fd.write(u'export var basePodSpec =", "a PoD XML file to a GeoJSON file. # #", "%s dst: %s\" % (src, dst)) properties = {\"src\": src,", "generated file is a javascript # file defining a variable", "obj) in features: fd.write(u'\"%s\": {\"type\": \"FeatureCollection\",' '\"crs\": { \"type\": \"name\",'", "footnote coordinates = [float(lng), float(lat)] geometry = {\"type\": \"Point\", \"coordinates\":", "= p.find(\"from\").text dst = p.find(\"to\").text if int(src) > MAXPOINT or", "MAXPOINT: continue name = p.find(\"name\").text descr = p.find(\"descr\").text lat =", "else: i = i + 1 fd.write(u'},\\n') if javascript: fd.write(u'};\\n')", "dst = p.find(\"to\").text if int(src) > MAXPOINT or int(dst) >", "all_points[dst] geometry = {\"type\": \"LineString\", \"coordinates\": [src_coords, dst_coords]} leg =", "number, \"name\": name, \"descr\": descr} if footnote != None: properties[\"footnote\"]", "if sys.version < '3': fd = codecs.open(fname, \"w\", encoding=\"utf-8\") else:", "p.find(\"number\").text if int(number) > MAXPOINT: continue name = p.find(\"name\").text descr", "json import io import sys import os.path import datetime if", "offshore_legs)]) def output_pod(fname, javascript, id, features): if sys.version < '3':", "def output_pod(fname, javascript, id, features): if sys.version < '3': fd", "GeoJSON file. 
# # With the --javascript parameter, the generated", "codecs # points number 9000 and above are not real", "= len(features) i = 1 for (name, obj) in features:", "#!/usr/bin/env python # Converts a PoD XML file to a", "< '3': fd = codecs.open(fname, \"w\", encoding=\"utf-8\") else: fd =", "all_points[src] dst_coords = all_points[dst] geometry = {\"type\": \"LineString\", \"coordinates\": [src_coords,", "flen: fd.write(u'}') else: i = i + 1 fd.write(u'},\\n') if", "p.find(\"descr\").text lat = p.find(\"lat\").text lng = p.find(\"long\").text footnote = None", "= p.find(\"lat\").text lng = p.find(\"long\").text footnote = None footnoteelem =", "run(): parser = argparse.ArgumentParser() parser.add_argument(\"-i\", \"--infile\", help=\"input file\") parser.add_argument(\"-o\", \"--outfile\",", "# area borders MAXPOINT=8999 def run(): parser = argparse.ArgumentParser() parser.add_argument(\"-i\",", "import re import json import io import sys import os.path", "p.find(\"lat\").text lng = p.find(\"long\").text footnote = None footnoteelem = p.find(\"footnote\")", "= {\"type\": \"Feature\", \"properties\": properties, \"geometry\": geometry}, if number in", "= codecs.open(fname, \"w\", encoding=\"utf-8\") else: fd = io.open(fname, \"w\", encoding=\"utf-8\")", "turning_points = [] for n in doc.findall(\"kretsar/krets/startpoints/number\"): startnumbers[n.text] = True", "both directions), # skip one direction continue dist = p.find(\"dist\").text", "geometry}, if number in startnumbers: start_points.extend(point) else: turning_points.extend(point) all_points[number] =", "as etree import argparse import re import json import io", "is not None: footnote = footnoteelem.text properties = {\"number\": number,", "= p.find(\"to\").text if int(src) > MAXPOINT or int(dst) > MAXPOINT:", "= p.find(\"dist\").text sea = p.find(\"sea\").text addtime = p.find(\"addtime\").text if dist", "('offshoreLegs', offshore_legs)]) def output_pod(fname, javascript, id, features): if sys.version <", "!= 
None: properties[\"footnote\"] = footnote coordinates = [float(lng), float(lat)] geometry", "variable 'basePodSpec'. # # Get the PoD XML file from", "def get_points(tree): doc = tree.getroot() startnumbers = {} all_points =", "one direction continue dist = p.find(\"dist\").text sea = p.find(\"sea\").text addtime", "i == flen: fd.write(u'}') else: i = i + 1", "to mark # area borders MAXPOINT=8999 def run(): parser =", "import json import io import sys import os.path import datetime", "file\") parser.add_argument(\"-o\", \"--outfile\", help=\"output file\") parser.add_argument(\"--id\", help=\"id of terrain\") parser.add_argument(\"--javascript\",", "With the --javascript parameter, the generated file is a javascript", "coordinates return all_points, start_points, turning_points def get_legs(tree, all_points): doc =", "continue name = p.find(\"name\").text descr = p.find(\"descr\").text lat = p.find(\"lat\").text", "file to a GeoJSON file. # # With the --javascript", "= footnote coordinates = [float(lng), float(lat)] geometry = {\"type\": \"Point\",", "properties = {\"number\": number, \"name\": name, \"descr\": descr} if footnote", "> MAXPOINT or int(dst) > MAXPOINT: continue if int(src) <", "inshore_legs), ('offshoreLegs', offshore_legs)]) def output_pod(fname, javascript, id, features): if sys.version", "argparse.ArgumentParser() parser.add_argument(\"-i\", \"--infile\", help=\"input file\") parser.add_argument(\"-o\", \"--outfile\", help=\"output file\") parser.add_argument(\"--id\",", "import sys import os.path import datetime if sys.version < '3':", "\"--outfile\", help=\"output file\") parser.add_argument(\"--id\", help=\"id of terrain\") parser.add_argument(\"--javascript\", action=\"store_true\") args", "fd = io.open(fname, \"w\", encoding=\"utf-8\") if javascript: fd.write(u'/* eslint-disable */\\n')", "basePodSpec = ') fd.write(u'{\"id\": %s, ' % id) flen =", "float(dist)} if properties[\"dist\"] == 0 and addtime == \"1\": properties[\"addtime\"]", "if 
sys.version < '3': import codecs # points number 9000", "\"type\": \"name\",' '\"properties\": { \"name\": \"urn:ogc:def:crs:OGC:1.3:CRS84\" } },' '\"features\":' %", "> MAXPOINT: continue name = p.find(\"name\").text descr = p.find(\"descr\").text lat", "is None: print(\"** error: no distance: src: %s dst: %s\"", "lng = p.find(\"long\").text footnote = None footnoteelem = p.find(\"footnote\") if", "addtime = p.find(\"addtime\").text if dist is None: print(\"** error: no", "= [] offshore = [] for p in doc.findall(\"legs/leg\"): src", "and addtime == \"1\": properties[\"addtime\"] = True; src_coords = all_points[src]", "legs are present twice (in both directions), # skip one", "borders MAXPOINT=8999 def run(): parser = argparse.ArgumentParser() parser.add_argument(\"-i\", \"--infile\", help=\"input", "% (src, dst)) properties = {\"src\": src, \"dst\": dst, \"dist\":", "properties = {\"src\": src, \"dst\": dst, \"dist\": float(dist)} if properties[\"dist\"]", "dist = p.find(\"dist\").text sea = p.find(\"sea\").text addtime = p.find(\"addtime\").text if", "since all legs are present twice (in both directions), #", "int(dst): # since all legs are present twice (in both", "[] for n in doc.findall(\"kretsar/krets/startpoints/number\"): startnumbers[n.text] = True for p", "number in startnumbers: start_points.extend(point) else: turning_points.extend(point) all_points[number] = coordinates return", "above are not real points; they are used to mark", "'basePodSpec'. 
# # Get the PoD XML file from http://dev.24-timmars.nu/PoD/xmlapi_app.php.", "% id) flen = len(features) i = 1 for (name,", "in doc.findall(\"kretsar/krets/startpoints/number\"): startnumbers[n.text] = True for p in doc.findall(\"points/point\"): number", "# # With the --javascript parameter, the generated file is", "parser.add_argument(\"--javascript\", action=\"store_true\") args = parser.parse_args() tree = etree.parse(args.infile) all_points, start_points,", "i = i + 1 fd.write(u'},\\n') if javascript: fd.write(u'};\\n') else:", "dst)) properties = {\"src\": src, \"dst\": dst, \"dist\": float(dist)} if", "None footnoteelem = p.find(\"footnote\") if footnoteelem is not None: footnote", "sea == \"0\": coast.extend(leg) else: offshore.extend(leg) return coast, offshore if", "features): if sys.version < '3': fd = codecs.open(fname, \"w\", encoding=\"utf-8\")", "float(lat)] geometry = {\"type\": \"Point\", \"coordinates\": coordinates} point = {\"type\":", "id) flen = len(features) i = 1 for (name, obj)", "javascript # file defining a variable 'basePodSpec'. 
# # Get", "[('startPoints', start_points), ('turningPoints', turning_points), ('inshoreLegs', inshore_legs), ('offshoreLegs', offshore_legs)]) def output_pod(fname,", "output_pod(fname, javascript, id, features): if sys.version < '3': fd =", "i + 1 fd.write(u'},\\n') if javascript: fd.write(u'};\\n') else: fd.write(u'}\\n') def", "= tree.getroot() coast = [] offshore = [] for p", "\"FeatureCollection\",' '\"crs\": { \"type\": \"name\",' '\"properties\": { \"name\": \"urn:ogc:def:crs:OGC:1.3:CRS84\" }", "python # Converts a PoD XML file to a GeoJSON", "help=\"input file\") parser.add_argument(\"-o\", \"--outfile\", help=\"output file\") parser.add_argument(\"--id\", help=\"id of terrain\")", "# since all legs are present twice (in both directions),", "are not real points; they are used to mark #", "inshore_legs, offshore_legs = get_legs(tree, all_points) output_pod(args.outfile, args.javascript, args.id, [('startPoints', start_points),", "# skip one direction continue dist = p.find(\"dist\").text sea =", "all_points, start_points, turning_points = get_points(tree) inshore_legs, offshore_legs = get_legs(tree, all_points)", "turning_points def get_legs(tree, all_points): doc = tree.getroot() coast = []", "properties, \"geometry\": geometry}, if sea == \"0\": coast.extend(leg) else: offshore.extend(leg)", "if i == flen: fd.write(u'}') else: i = i +", "descr = p.find(\"descr\").text lat = p.find(\"lat\").text lng = p.find(\"long\").text footnote", "all_points[number] = coordinates return all_points, start_points, turning_points def get_legs(tree, all_points):", "etree import argparse import re import json import io import", "are used to mark # area borders MAXPOINT=8999 def run():", "footnoteelem = p.find(\"footnote\") if footnoteelem is not None: footnote =", "file\") parser.add_argument(\"--id\", help=\"id of terrain\") parser.add_argument(\"--javascript\", action=\"store_true\") args = parser.parse_args()", "int(src) < int(dst): # since all legs are present twice", 
"\"dist\": float(dist)} if properties[\"dist\"] == 0 and addtime == \"1\":", "encoding=\"utf-8\") else: fd = io.open(fname, \"w\", encoding=\"utf-8\") if javascript: fd.write(u'/*", "i = 1 for (name, obj) in features: fd.write(u'\"%s\": {\"type\":", "{\"src\": src, \"dst\": dst, \"dist\": float(dist)} if properties[\"dist\"] == 0", "= tree.getroot() startnumbers = {} all_points = {} start_points =", "'3': fd = codecs.open(fname, \"w\", encoding=\"utf-8\") else: fd = io.open(fname,", "else: fd.write(u'}\\n') def get_points(tree): doc = tree.getroot() startnumbers = {}", "== 0 and addtime == \"1\": properties[\"addtime\"] = True; src_coords", "p.find(\"addtime\").text if dist is None: print(\"** error: no distance: src:", "') fd.write(u'{\"id\": %s, ' % id) flen = len(features) i", "parser = argparse.ArgumentParser() parser.add_argument(\"-i\", \"--infile\", help=\"input file\") parser.add_argument(\"-o\", \"--outfile\", help=\"output", "etree.parse(args.infile) all_points, start_points, turning_points = get_points(tree) inshore_legs, offshore_legs = get_legs(tree,", "defining a variable 'basePodSpec'. # # Get the PoD XML", "\"name\": name, \"descr\": descr} if footnote != None: properties[\"footnote\"] =", "src_coords = all_points[src] dst_coords = all_points[dst] geometry = {\"type\": \"LineString\",", "fd.write(u'};\\n') else: fd.write(u'}\\n') def get_points(tree): doc = tree.getroot() startnumbers =", "file from http://dev.24-timmars.nu/PoD/xmlapi_app.php. import xml.etree.ElementTree as etree import argparse import", "src: %s dst: %s\" % (src, dst)) properties = {\"src\":", "dist is None: print(\"** error: no distance: src: %s dst:", "< '3': import codecs # points number 9000 and above", "geometry}, if sea == \"0\": coast.extend(leg) else: offshore.extend(leg) return coast,", "},' '\"features\":' % name) fd.write(json.dumps(obj, ensure_ascii=False)) if i == flen:", "http://dev.24-timmars.nu/PoD/xmlapi_app.php. 
import xml.etree.ElementTree as etree import argparse import re import", "properties[\"footnote\"] = footnote coordinates = [float(lng), float(lat)] geometry = {\"type\":", "a GeoJSON file. # # With the --javascript parameter, the", "javascript: fd.write(u'};\\n') else: fd.write(u'}\\n') def get_points(tree): doc = tree.getroot() startnumbers", "datetime if sys.version < '3': import codecs # points number", "if properties[\"dist\"] == 0 and addtime == \"1\": properties[\"addtime\"] =", "= get_legs(tree, all_points) output_pod(args.outfile, args.javascript, args.id, [('startPoints', start_points), ('turningPoints', turning_points),", "MAXPOINT or int(dst) > MAXPOINT: continue if int(src) < int(dst):", "footnote = None footnoteelem = p.find(\"footnote\") if footnoteelem is not", "javascript, id, features): if sys.version < '3': fd = codecs.open(fname,", "the --javascript parameter, the generated file is a javascript #", "= [] for p in doc.findall(\"legs/leg\"): src = p.find(\"from\").text dst", "not None: footnote = footnoteelem.text properties = {\"number\": number, \"name\":", "[] offshore = [] for p in doc.findall(\"legs/leg\"): src =", "p in doc.findall(\"legs/leg\"): src = p.find(\"from\").text dst = p.find(\"to\").text if", "len(features) i = 1 for (name, obj) in features: fd.write(u'\"%s\":", "in doc.findall(\"legs/leg\"): src = p.find(\"from\").text dst = p.find(\"to\").text if int(src)", "True; src_coords = all_points[src] dst_coords = all_points[dst] geometry = {\"type\":", "turning_points), ('inshoreLegs', inshore_legs), ('offshoreLegs', offshore_legs)]) def output_pod(fname, javascript, id, features):", "tree.getroot() startnumbers = {} all_points = {} start_points = []", "io.open(fname, \"w\", encoding=\"utf-8\") if javascript: fd.write(u'/* eslint-disable */\\n') fd.write(u'export var", "a javascript # file defining a variable 'basePodSpec'. 
# #", "\"coordinates\": [src_coords, dst_coords]} leg = {\"type\": \"Feature\", \"properties\": properties, \"geometry\":", "else: turning_points.extend(point) all_points[number] = coordinates return all_points, start_points, turning_points def", "get_points(tree): doc = tree.getroot() startnumbers = {} all_points = {}", "def get_legs(tree, all_points): doc = tree.getroot() coast = [] offshore", "\"descr\": descr} if footnote != None: properties[\"footnote\"] = footnote coordinates", "help=\"id of terrain\") parser.add_argument(\"--javascript\", action=\"store_true\") args = parser.parse_args() tree =", "= parser.parse_args() tree = etree.parse(args.infile) all_points, start_points, turning_points = get_points(tree)", "import xml.etree.ElementTree as etree import argparse import re import json", "\"coordinates\": coordinates} point = {\"type\": \"Feature\", \"properties\": properties, \"geometry\": geometry},", "dst_coords = all_points[dst] geometry = {\"type\": \"LineString\", \"coordinates\": [src_coords, dst_coords]}", "skip one direction continue dist = p.find(\"dist\").text sea = p.find(\"sea\").text", "\"Point\", \"coordinates\": coordinates} point = {\"type\": \"Feature\", \"properties\": properties, \"geometry\":", "geometry = {\"type\": \"Point\", \"coordinates\": coordinates} point = {\"type\": \"Feature\",", "lat = p.find(\"lat\").text lng = p.find(\"long\").text footnote = None footnoteelem", "for (name, obj) in features: fd.write(u'\"%s\": {\"type\": \"FeatureCollection\",' '\"crs\": {", "n in doc.findall(\"kretsar/krets/startpoints/number\"): startnumbers[n.text] = True for p in doc.findall(\"points/point\"):", "p.find(\"dist\").text sea = p.find(\"sea\").text addtime = p.find(\"addtime\").text if dist is", "all_points) output_pod(args.outfile, args.javascript, args.id, [('startPoints', start_points), ('turningPoints', turning_points), ('inshoreLegs', inshore_legs),", "footnoteelem is not None: footnote = footnoteelem.text properties = {\"number\":", 
"args = parser.parse_args() tree = etree.parse(args.infile) all_points, start_points, turning_points =", "if int(number) > MAXPOINT: continue name = p.find(\"name\").text descr =", "dst, \"dist\": float(dist)} if properties[\"dist\"] == 0 and addtime ==", "\"dst\": dst, \"dist\": float(dist)} if properties[\"dist\"] == 0 and addtime", "id, features): if sys.version < '3': fd = codecs.open(fname, \"w\",", "get_points(tree) inshore_legs, offshore_legs = get_legs(tree, all_points) output_pod(args.outfile, args.javascript, args.id, [('startPoints',", "ensure_ascii=False)) if i == flen: fd.write(u'}') else: i = i", "are present twice (in both directions), # skip one direction", "int(src) > MAXPOINT or int(dst) > MAXPOINT: continue if int(src)", "if footnote != None: properties[\"footnote\"] = footnote coordinates = [float(lng),", "int(dst) > MAXPOINT: continue if int(src) < int(dst): # since", "doc = tree.getroot() coast = [] offshore = [] for", "\"properties\": properties, \"geometry\": geometry}, if number in startnumbers: start_points.extend(point) else:", "import datetime if sys.version < '3': import codecs # points", "PoD XML file from http://dev.24-timmars.nu/PoD/xmlapi_app.php. 
import xml.etree.ElementTree as etree import", "startnumbers: start_points.extend(point) else: turning_points.extend(point) all_points[number] = coordinates return all_points, start_points,", "{\"type\": \"Point\", \"coordinates\": coordinates} point = {\"type\": \"Feature\", \"properties\": properties,", "doc.findall(\"kretsar/krets/startpoints/number\"): startnumbers[n.text] = True for p in doc.findall(\"points/point\"): number =", "properties[\"dist\"] == 0 and addtime == \"1\": properties[\"addtime\"] = True;", "import os.path import datetime if sys.version < '3': import codecs", "{} start_points = [] turning_points = [] for n in", "re import json import io import sys import os.path import", "= {} all_points = {} start_points = [] turning_points =", "\"geometry\": geometry}, if number in startnumbers: start_points.extend(point) else: turning_points.extend(point) all_points[number]", "io import sys import os.path import datetime if sys.version <", "'\"properties\": { \"name\": \"urn:ogc:def:crs:OGC:1.3:CRS84\" } },' '\"features\":' % name) fd.write(json.dumps(obj,", "= True; src_coords = all_points[src] dst_coords = all_points[dst] geometry =", "= [] for n in doc.findall(\"kretsar/krets/startpoints/number\"): startnumbers[n.text] = True for", "\"urn:ogc:def:crs:OGC:1.3:CRS84\" } },' '\"features\":' % name) fd.write(json.dumps(obj, ensure_ascii=False)) if i", "file defining a variable 'basePodSpec'. 
# # Get the PoD", "= None footnoteelem = p.find(\"footnote\") if footnoteelem is not None:", "= io.open(fname, \"w\", encoding=\"utf-8\") if javascript: fd.write(u'/* eslint-disable */\\n') fd.write(u'export", "def run(): parser = argparse.ArgumentParser() parser.add_argument(\"-i\", \"--infile\", help=\"input file\") parser.add_argument(\"-o\",", "terrain\") parser.add_argument(\"--javascript\", action=\"store_true\") args = parser.parse_args() tree = etree.parse(args.infile) all_points,", "fd.write(u'/* eslint-disable */\\n') fd.write(u'export var basePodSpec = ') fd.write(u'{\"id\": %s,", "if sea == \"0\": coast.extend(leg) else: offshore.extend(leg) return coast, offshore", "p in doc.findall(\"points/point\"): number = p.find(\"number\").text if int(number) > MAXPOINT:", "(name, obj) in features: fd.write(u'\"%s\": {\"type\": \"FeatureCollection\",' '\"crs\": { \"type\":", "footnoteelem.text properties = {\"number\": number, \"name\": name, \"descr\": descr} if", "sys.version < '3': fd = codecs.open(fname, \"w\", encoding=\"utf-8\") else: fd", "% name) fd.write(json.dumps(obj, ensure_ascii=False)) if i == flen: fd.write(u'}') else:", "sea = p.find(\"sea\").text addtime = p.find(\"addtime\").text if dist is None:", "descr} if footnote != None: properties[\"footnote\"] = footnote coordinates =", "import io import sys import os.path import datetime if sys.version", "from http://dev.24-timmars.nu/PoD/xmlapi_app.php. 
import xml.etree.ElementTree as etree import argparse import re", "xml.etree.ElementTree as etree import argparse import re import json import", "all_points): doc = tree.getroot() coast = [] offshore = []", "src, \"dst\": dst, \"dist\": float(dist)} if properties[\"dist\"] == 0 and", "real points; they are used to mark # area borders", "# points number 9000 and above are not real points;", "offshore = [] for p in doc.findall(\"legs/leg\"): src = p.find(\"from\").text", "\"1\": properties[\"addtime\"] = True; src_coords = all_points[src] dst_coords = all_points[dst]", "= p.find(\"number\").text if int(number) > MAXPOINT: continue name = p.find(\"name\").text", "all_points = {} start_points = [] turning_points = [] for", "if int(src) < int(dst): # since all legs are present", "properties, \"geometry\": geometry}, if number in startnumbers: start_points.extend(point) else: turning_points.extend(point)", "= footnoteelem.text properties = {\"number\": number, \"name\": name, \"descr\": descr}", "XML file to a GeoJSON file. # # With the", "MAXPOINT=8999 def run(): parser = argparse.ArgumentParser() parser.add_argument(\"-i\", \"--infile\", help=\"input file\")", "properties[\"addtime\"] = True; src_coords = all_points[src] dst_coords = all_points[dst] geometry", "1 fd.write(u'},\\n') if javascript: fd.write(u'};\\n') else: fd.write(u'}\\n') def get_points(tree): doc", "doc.findall(\"legs/leg\"): src = p.find(\"from\").text dst = p.find(\"to\").text if int(src) >", "' % id) flen = len(features) i = 1 for", "= all_points[src] dst_coords = all_points[dst] geometry = {\"type\": \"LineString\", \"coordinates\":", "= p.find(\"descr\").text lat = p.find(\"lat\").text lng = p.find(\"long\").text footnote =", "# file defining a variable 'basePodSpec'. # # Get the", "Get the PoD XML file from http://dev.24-timmars.nu/PoD/xmlapi_app.php. 
import xml.etree.ElementTree as", "fd.write(u'\"%s\": {\"type\": \"FeatureCollection\",' '\"crs\": { \"type\": \"name\",' '\"properties\": { \"name\":", "None: footnote = footnoteelem.text properties = {\"number\": number, \"name\": name,", "{\"number\": number, \"name\": name, \"descr\": descr} if footnote != None:", "{\"type\": \"Feature\", \"properties\": properties, \"geometry\": geometry}, if number in startnumbers:", "= True for p in doc.findall(\"points/point\"): number = p.find(\"number\").text if", "point = {\"type\": \"Feature\", \"properties\": properties, \"geometry\": geometry}, if number", "start_points.extend(point) else: turning_points.extend(point) all_points[number] = coordinates return all_points, start_points, turning_points", "startnumbers[n.text] = True for p in doc.findall(\"points/point\"): number = p.find(\"number\").text", "file. # # With the --javascript parameter, the generated file", "p.find(\"from\").text dst = p.find(\"to\").text if int(src) > MAXPOINT or int(dst)", "'\"features\":' % name) fd.write(json.dumps(obj, ensure_ascii=False)) if i == flen: fd.write(u'}')", "\"w\", encoding=\"utf-8\") if javascript: fd.write(u'/* eslint-disable */\\n') fd.write(u'export var basePodSpec", "directions), # skip one direction continue dist = p.find(\"dist\").text sea", "is a javascript # file defining a variable 'basePodSpec'. #", "startnumbers = {} all_points = {} start_points = [] turning_points", "and above are not real points; they are used to", "continue if int(src) < int(dst): # since all legs are", "dst_coords]} leg = {\"type\": \"Feature\", \"properties\": properties, \"geometry\": geometry}, if", "# # Get the PoD XML file from http://dev.24-timmars.nu/PoD/xmlapi_app.php. 
import", "= coordinates return all_points, start_points, turning_points def get_legs(tree, all_points): doc", "= p.find(\"long\").text footnote = None footnoteelem = p.find(\"footnote\") if footnoteelem", "var basePodSpec = ') fd.write(u'{\"id\": %s, ' % id) flen", "help=\"output file\") parser.add_argument(\"--id\", help=\"id of terrain\") parser.add_argument(\"--javascript\", action=\"store_true\") args =", "= {\"type\": \"Feature\", \"properties\": properties, \"geometry\": geometry}, if sea ==", "'3': import codecs # points number 9000 and above are", "[float(lng), float(lat)] geometry = {\"type\": \"Point\", \"coordinates\": coordinates} point =", "None: print(\"** error: no distance: src: %s dst: %s\" %", "args.javascript, args.id, [('startPoints', start_points), ('turningPoints', turning_points), ('inshoreLegs', inshore_legs), ('offshoreLegs', offshore_legs)])", "+ 1 fd.write(u'},\\n') if javascript: fd.write(u'};\\n') else: fd.write(u'}\\n') def get_points(tree):", "name = p.find(\"name\").text descr = p.find(\"descr\").text lat = p.find(\"lat\").text lng", "= all_points[dst] geometry = {\"type\": \"LineString\", \"coordinates\": [src_coords, dst_coords]} leg", "\"name\": \"urn:ogc:def:crs:OGC:1.3:CRS84\" } },' '\"features\":' % name) fd.write(json.dumps(obj, ensure_ascii=False)) if", "tree.getroot() coast = [] offshore = [] for p in", "for p in doc.findall(\"legs/leg\"): src = p.find(\"from\").text dst = p.find(\"to\").text", "if footnoteelem is not None: footnote = footnoteelem.text properties =", "# Converts a PoD XML file to a GeoJSON file.", "= {} start_points = [] turning_points = [] for n", "footnote != None: properties[\"footnote\"] = footnote coordinates = [float(lng), float(lat)]", "\"LineString\", \"coordinates\": [src_coords, dst_coords]} leg = {\"type\": \"Feature\", \"properties\": properties,", "fd.write(json.dumps(obj, ensure_ascii=False)) if i == flen: fd.write(u'}') else: i =", "{\"type\": \"FeatureCollection\",' '\"crs\": { \"type\": 
\"name\",' '\"properties\": { \"name\": \"urn:ogc:def:crs:OGC:1.3:CRS84\"", "\"Feature\", \"properties\": properties, \"geometry\": geometry}, if number in startnumbers: start_points.extend(point)", "argparse import re import json import io import sys import", "coast.extend(leg) else: offshore.extend(leg) return coast, offshore if __name__ == '__main__':", "output_pod(args.outfile, args.javascript, args.id, [('startPoints', start_points), ('turningPoints', turning_points), ('inshoreLegs', inshore_legs), ('offshoreLegs',", "twice (in both directions), # skip one direction continue dist", "== \"0\": coast.extend(leg) else: offshore.extend(leg) return coast, offshore if __name__", "{ \"type\": \"name\",' '\"properties\": { \"name\": \"urn:ogc:def:crs:OGC:1.3:CRS84\" } },' '\"features\":'", "name, \"descr\": descr} if footnote != None: properties[\"footnote\"] = footnote", "present twice (in both directions), # skip one direction continue", "= i + 1 fd.write(u'},\\n') if javascript: fd.write(u'};\\n') else: fd.write(u'}\\n')", "Converts a PoD XML file to a GeoJSON file. #", "used to mark # area borders MAXPOINT=8999 def run(): parser", "== \"1\": properties[\"addtime\"] = True; src_coords = all_points[src] dst_coords =", "a variable 'basePodSpec'. 
# # Get the PoD XML file", "= {\"type\": \"LineString\", \"coordinates\": [src_coords, dst_coords]} leg = {\"type\": \"Feature\",", "points number 9000 and above are not real points; they", "= 1 for (name, obj) in features: fd.write(u'\"%s\": {\"type\": \"FeatureCollection\",'", "= p.find(\"name\").text descr = p.find(\"descr\").text lat = p.find(\"lat\").text lng =", "args.id, [('startPoints', start_points), ('turningPoints', turning_points), ('inshoreLegs', inshore_legs), ('offshoreLegs', offshore_legs)]) def", "= get_points(tree) inshore_legs, offshore_legs = get_legs(tree, all_points) output_pod(args.outfile, args.javascript, args.id,", "doc = tree.getroot() startnumbers = {} all_points = {} start_points", "= p.find(\"sea\").text addtime = p.find(\"addtime\").text if dist is None: print(\"**", "[src_coords, dst_coords]} leg = {\"type\": \"Feature\", \"properties\": properties, \"geometry\": geometry},", "os.path import datetime if sys.version < '3': import codecs #", "coordinates} point = {\"type\": \"Feature\", \"properties\": properties, \"geometry\": geometry}, if", "dst: %s\" % (src, dst)) properties = {\"src\": src, \"dst\":", "[] for p in doc.findall(\"legs/leg\"): src = p.find(\"from\").text dst =", "('turningPoints', turning_points), ('inshoreLegs', inshore_legs), ('offshoreLegs', offshore_legs)]) def output_pod(fname, javascript, id,", "start_points), ('turningPoints', turning_points), ('inshoreLegs', inshore_legs), ('offshoreLegs', offshore_legs)]) def output_pod(fname, javascript,", "fd.write(u'}\\n') def get_points(tree): doc = tree.getroot() startnumbers = {} all_points", "features: fd.write(u'\"%s\": {\"type\": \"FeatureCollection\",' '\"crs\": { \"type\": \"name\",' '\"properties\": {", "no distance: src: %s dst: %s\" % (src, dst)) properties", "= {\"src\": src, \"dst\": dst, \"dist\": float(dist)} if properties[\"dist\"] ==", "\"name\",' '\"properties\": { \"name\": \"urn:ogc:def:crs:OGC:1.3:CRS84\" } },' '\"features\":' % name)", "= 
p.find(\"footnote\") if footnoteelem is not None: footnote = footnoteelem.text", "geometry = {\"type\": \"LineString\", \"coordinates\": [src_coords, dst_coords]} leg = {\"type\":", "offshore_legs = get_legs(tree, all_points) output_pod(args.outfile, args.javascript, args.id, [('startPoints', start_points), ('turningPoints',", "else: fd = io.open(fname, \"w\", encoding=\"utf-8\") if javascript: fd.write(u'/* eslint-disable", "== flen: fd.write(u'}') else: i = i + 1 fd.write(u'},\\n')", "addtime == \"1\": properties[\"addtime\"] = True; src_coords = all_points[src] dst_coords", "< int(dst): # since all legs are present twice (in", "p.find(\"to\").text if int(src) > MAXPOINT or int(dst) > MAXPOINT: continue", "tree = etree.parse(args.infile) all_points, start_points, turning_points = get_points(tree) inshore_legs, offshore_legs", "if int(src) > MAXPOINT or int(dst) > MAXPOINT: continue if", "javascript: fd.write(u'/* eslint-disable */\\n') fd.write(u'export var basePodSpec = ') fd.write(u'{\"id\":", "--javascript parameter, the generated file is a javascript # file", "codecs.open(fname, \"w\", encoding=\"utf-8\") else: fd = io.open(fname, \"w\", encoding=\"utf-8\") if", "} },' '\"features\":' % name) fd.write(json.dumps(obj, ensure_ascii=False)) if i ==", "%s, ' % id) flen = len(features) i = 1", "if dist is None: print(\"** error: no distance: src: %s", "= [] turning_points = [] for n in doc.findall(\"kretsar/krets/startpoints/number\"): startnumbers[n.text]", "= {\"number\": number, \"name\": name, \"descr\": descr} if footnote !=", "9000 and above are not real points; they are used", "MAXPOINT: continue if int(src) < int(dst): # since all legs", "p.find(\"sea\").text addtime = p.find(\"addtime\").text if dist is None: print(\"** error:", "%s\" % (src, dst)) properties = {\"src\": src, \"dst\": dst,", "file is a javascript # file defining a variable 'basePodSpec'.", "p.find(\"long\").text footnote = None footnoteelem = p.find(\"footnote\") if footnoteelem is", 
"else: offshore.extend(leg) return coast, offshore if __name__ == '__main__': run()", "p.find(\"footnote\") if footnoteelem is not None: footnote = footnoteelem.text properties", "start_points, turning_points = get_points(tree) inshore_legs, offshore_legs = get_legs(tree, all_points) output_pod(args.outfile,", "number = p.find(\"number\").text if int(number) > MAXPOINT: continue name =", "src = p.find(\"from\").text dst = p.find(\"to\").text if int(src) > MAXPOINT", "area borders MAXPOINT=8999 def run(): parser = argparse.ArgumentParser() parser.add_argument(\"-i\", \"--infile\",", "in features: fd.write(u'\"%s\": {\"type\": \"FeatureCollection\",' '\"crs\": { \"type\": \"name\",' '\"properties\":", "0 and addtime == \"1\": properties[\"addtime\"] = True; src_coords =" ]
[ "self.default_label_stores.append(provider_class) def register_default_evaluator(self, provider_class): \"\"\"Registers an EvaluatorDefaultProvider for use as", "with double quotes lets us parse it as a JSON", "def register_default_raster_source(self, provider_class): \"\"\"Registers a RasterSourceDefaultProvider for use as a", "is already registered for command' 'with type {}'.format(command_type)) self.aux_command_classes[command_type] =", "to their \"register_plugin\" method. Args: plugin_config - the everett ConfigManager", "used for this plugin. This will be used to construct", "double quotes lets us parse it as a JSON list.", "= [] plugin_base = PluginBase(package='rastervision.plugins') for uri in plugin_paths: plugin_name", "plugin. Args: group - The Config group, e.g. rv.BACKEND, rv.TASK.", "[] plugin_base = PluginBase(package='rastervision.plugins') for uri in plugin_paths: plugin_name =", "string that will match the command line argument used to", "if the key is \"FOO_RUNNER\", then users can use the", "' instead of \". Replacing # single quotes with double", "command_type in self.command_config_builders: raise PluginError( 'CommandConfigBuilder is already registered for", "key - The key used for this plugin. This will", "a ConfigBuilder as a plugin. Args: group - The Config", "for use as a plugin.\"\"\" self.default_label_sources.append(provider_class) def register_default_label_store(self, provider_class): \"\"\"Registers", "builder_class - The subclass of CommandConfigBuilder that builds the CommandConfig", "'CommandConfigBuilder already registered for command' 'with type {}'.format(command_type)) self.command_config_builders[command_type] =", "use the runner by issuing a \"rastervision run foo_runner ...\"", "source # to keep it from getting GC'd. self.plugin_sources.append(plugin_source) self._load_plugin(plugin_source.load_plugin(plugin_name),", "this plugin runner. 
This is a string that will match", "subclass of CommandConfigBuilder that builds the CommandConfig for this plugin.", "load_conf_list(plugin_config('files', default='[]')) self._load_from_files(plugin_files) self.plugin_files = plugin_files plugin_modules = load_conf_list(plugin_config('modules', default='[]'))", "return self.plugin_sources = [] plugin_base = PluginBase(package='rastervision.plugins') for uri in", "rv._registry.get_file_system(uri, search_plugins=False) local_path = download_if_needed(uri, plugin_path, fs=fs) local_dir = os.path.dirname(local_path)", "\"\"\" if command_type in self.command_config_builders: raise PluginError( 'CommandConfigBuilder already registered", "os import json import importlib from pluginbase import PluginBase import", "PluginConfig as PluginConfigMsg from rastervision.utils.files import download_if_needed class PluginError(Exception): pass", "instead of \". Replacing # single quotes with double quotes", "group, key, builder_class): \"\"\"Registers a ConfigBuilder as a plugin. Args:", "reference this runner; e.g. if the key is \"FOO_RUNNER\", then", "{} self.default_raster_sources = [] self.default_vector_sources = [] self.default_label_sources = []", "'but it is not callable'.format(identifier)) # TODO: Log loading plugin.", "line argument used to reference this runner; e.g. if the", "will match the command line argument used to reference this", "Config for this plugin. \"\"\" if (group, key) in self.config_builders:", "key)] = builder_class def register_command_config_builder(self, command_type, builder_class): \"\"\"Registers a ConfigBuilder", "a \"rastervision run foo_runner ...\" command. runner_class - The class", "having a list-like string, with ' instead of \". Replacing", "...\" command. runner_class - The class of the ExperimentRunner plugin.", "callable(register_method): raise PluginError('Plugin at {} has a ' '\"register_plugin\" attribute,", "in a \".builder(key)\" call. 
builder_class - The subclass of CommandConfigBuilder", "rv._registry._get_plugin_registry() def __init__(self, plugin_config, rv_home): \"\"\"Initializes this plugin registry. A", "module) def add_plugins_from_proto(self, plugin_msg): new_plugin_files = list( set(plugin_msg.plugin_uris) - set(self.plugin_files))", "plugin_modules=self.plugin_modules) def register_config_builder(self, group, key, builder_class): \"\"\"Registers a ConfigBuilder as", "' '\"register_plugin\" attribute, ' 'but it is not callable'.format(identifier)) #", "import importlib from pluginbase import PluginBase import rastervision as rv", "if not hasattr(plugin, 'register_plugin'): raise PluginError('Plugin at {} does not", "plugin.\"\"\" self.default_raster_sources.append(provider_class) def register_default_vector_source(self, provider_class): \"\"\"Registers a VectorSourceDefaultProvider for use", "builder_class def register_command_config_builder(self, command_type, builder_class): \"\"\"Registers a ConfigBuilder as a", "transformed to # having a list-like string, with ' instead", "plugin runner. 
This is a string that will match the", "= [] self.aux_command_classes = {} self.default_raster_sources = [] self.default_vector_sources =", "We're required to hang onto the source # to keep", "class PluginError(Exception): pass def load_conf_list(s): \"\"\"Loads a list of items", "PluginRegistry: @staticmethod def get_instance(): return rv._registry._get_plugin_registry() def __init__(self, plugin_config, rv_home):", "and key {}'.format(group, key)) self.config_builders[(group, key)] = builder_class def register_command_config_builder(self,", "= download_if_needed(uri, plugin_path, fs=fs) local_dir = os.path.dirname(local_path) plugin_source = plugin_base.make_plugin_source(", "def _load_from_modules(self, plugin_modules): if not plugin_modules: return for module in", "Vision allowed for a `[ \"module\" ]` like syntax, even", "= [] self.default_label_stores = [] self.default_evaluators = [] self.experiment_runners =", "plugin_modules: return for module in plugin_modules: plugin = importlib.import_module(module) self._load_plugin(plugin,", "`[ \"module\" ]` like syntax, even though that didn't work", "- The key used for this plugin. 
This will be", "self._load_from_modules(new_plugin_modules) self.plugin_modules.extend(new_plugin_modules) def to_proto(self): \"\"\"Returns a protobuf message that records", "register_method = getattr(plugin, 'register_plugin') if not callable(register_method): raise PluginError('Plugin at", "plugin_config - the everett ConfigManager for the plugin section of", "download_if_needed(uri, plugin_path, fs=fs) local_dir = os.path.dirname(local_path) plugin_source = plugin_base.make_plugin_source( searchpath=[local_dir])", "self.config_builders[(group, key)] = builder_class def register_command_config_builder(self, command_type, builder_class): \"\"\"Registers a", "[] self.aux_command_classes = {} self.default_raster_sources = [] self.default_vector_sources = []", "provider_class): \"\"\"Registers a VectorSourceDefaultProvider for use as a plugin.\"\"\" self.default_vector_sources.append(provider_class)", "= {} self.filesystems = [] plugin_files = load_conf_list(plugin_config('files', default='[]')) self._load_from_files(plugin_files)", "= os.path.join(self.plugin_root_dir, plugin_name) fs = rv._registry.get_file_system(uri, search_plugins=False) local_path = download_if_needed(uri,", "self.experiment_runners = {} self.filesystems = [] plugin_files = load_conf_list(plugin_config('files', default='[]'))", "a `[ \"module\" ]` like syntax, even though that didn't", "VectorSourceDefaultProvider for use as a plugin.\"\"\" self.default_vector_sources.append(provider_class) def register_default_label_source(self, provider_class):", "plugin registry. A plugin registry is passed to plugins in", "with ' instead of \". Replacing # single quotes with", "download_if_needed class PluginError(Exception): pass def load_conf_list(s): \"\"\"Loads a list of", "a ConfigBuilder as a plugin. Args: command_type - The key", "a plugin. 
Args: runner_key - The key used to reference", "plugin_name = os.path.splitext(os.path.basename(uri))[0] plugin_path = os.path.join(self.plugin_root_dir, plugin_name) fs = rv._registry.get_file_system(uri,", "PluginError('AuxCommand is already registered for command' 'with type {}'.format(command_type)) self.aux_command_classes[command_type]", "command' 'with type {}'.format(command_type)) if command_type in self.aux_command_classes: raise PluginError('AuxCommand", "= list( set(plugin_msg.plugin_uris) - set(self.plugin_files)) self._load_from_files(new_plugin_files) self.plugin_files.extend(new_plugin_files) new_plugin_modules = list(", "self.default_raster_sources.append(provider_class) def register_default_vector_source(self, provider_class): \"\"\"Registers a VectorSourceDefaultProvider for use as", "quotes lets us parse it as a JSON list. return", "key used to reference this plugin runner. This is a", "is already registered for command' 'with type {}'.format(command_type)) if command_type", "json import importlib from pluginbase import PluginBase import rastervision as", "this runner; e.g. if the key is \"FOO_RUNNER\", then users", "add_plugins_from_proto(self, plugin_msg): new_plugin_files = list( set(plugin_msg.plugin_uris) - set(self.plugin_files)) self._load_from_files(new_plugin_files) self.plugin_files.extend(new_plugin_files)", "register_default_label_store(self, provider_class): \"\"\"Registers a LabelStoreDefaultProvider for use as a plugin.\"\"\"", "list of values will be transformed to # having a", "foo_runner ...\" command. runner_class - The class of the ExperimentRunner", "\".builder(key)\" call. builder_class - The subclass of ConfigBuilder that builds", "hasattr(plugin, 'register_plugin'): raise PluginError('Plugin at {} does not have '", "ConfigManager for the plugin section of the application configuration. \"\"\"", "ExperimentRunner as a plugin. Args: runner_key - The key used", "in a \".builder(key)\" call. 
builder_class - The subclass of ConfigBuilder", "self.aux_command_classes[command_type] = command_class if command_class.options.include_by_default: self.commands.append(command_type) def register_default_raster_source(self, provider_class): \"\"\"Registers", "in self.aux_command_classes: raise PluginError('AuxCommand is already registered for command' 'with", "register_experiment_runner(self, runner_key, runner_class): \"\"\"Registers an ExperimentRunner as a plugin. Args:", "load_conf_list(plugin_config('modules', default='[]')) self._load_from_modules(plugin_modules) self.plugin_modules = plugin_modules def _load_plugin(self, plugin, identifier):", "for command' 'with type {}'.format(command_type)) self.command_config_builders[command_type] = builder_class self.commands.append(command_type) def", "if command_type in self.command_config_builders: raise PluginError( 'CommandConfigBuilder is already registered", "the key is \"FOO_RUNNER\", then users can use the runner", "set(self.plugin_files)) self._load_from_files(new_plugin_files) self.plugin_files.extend(new_plugin_files) new_plugin_modules = list( set(plugin_msg.plugin_modules) - set(self.plugin_modules)) self._load_from_modules(new_plugin_modules)", "self.default_raster_sources = [] self.default_vector_sources = [] self.default_label_sources = [] self.default_label_stores", "= plugin_files plugin_modules = load_conf_list(plugin_config('modules', default='[]')) self._load_from_modules(plugin_modules) self.plugin_modules = plugin_modules", "\"module\" ]` like syntax, even though that didn't work for", "register_default_evaluator(self, provider_class): \"\"\"Registers an EvaluatorDefaultProvider for use as a plugin.\"\"\"", "get_instance(): return rv._registry._get_plugin_registry() def __init__(self, plugin_config, rv_home): \"\"\"Initializes this plugin", "if not plugin_modules: return for module in plugin_modules: plugin =", "return rv._registry._get_plugin_registry() def __init__(self, plugin_config, 
rv_home): \"\"\"Initializes this plugin registry.", "\"\"\" if (group, key) in self.config_builders: raise PluginError('ConfigBuilder already registered", "custom AuxCommand as a plugin. Args: command_type - The key", "a LabelSourceDefaultProvider for use as a plugin.\"\"\" self.default_label_sources.append(provider_class) def register_default_label_store(self,", "be transformed to # having a list-like string, with '", "registry. A plugin registry is passed to plugins in a", "to construct the builder in a \".builder(key)\" call. command_class -", "if not callable(register_method): raise PluginError('Plugin at {} has a '", "Config group, e.g. rv.BACKEND, rv.TASK. key - The key used", "this plugin. This will be used to construct the builder", "self.commands.append(command_type) def register_aux_command(self, command_type, command_class): \"\"\"Registers a custom AuxCommand as", "getting GC'd. self.plugin_sources.append(plugin_source) self._load_plugin(plugin_source.load_plugin(plugin_name), uri) def _load_from_modules(self, plugin_modules): if not", "be comma separated. This takes into account that previous versions", "\"rastervision run foo_runner ...\" command. runner_class - The class of", "a LabelStoreDefaultProvider for use as a plugin.\"\"\" self.default_label_stores.append(provider_class) def register_default_evaluator(self,", "runner by issuing a \"rastervision run foo_runner ...\" command. runner_class", "' 'but it is not callable'.format(identifier)) # TODO: Log loading", "call. builder_class - The subclass of ConfigBuilder that builds the", "= runner_class def register_filesystem(self, filesystem_class): \"\"\"Registers a FileSystem as a", "= [] plugin_files = load_conf_list(plugin_config('files', default='[]')) self._load_from_files(plugin_files) self.plugin_files = plugin_files", "Args: group - The Config group, e.g. rv.BACKEND, rv.TASK. 
key", "a RasterSourceDefaultProvider for use as a plugin.\"\"\" self.default_raster_sources.append(provider_class) def register_default_vector_source(self,", "register_command_config_builder(self, command_type, builder_class): \"\"\"Registers a ConfigBuilder as a plugin. Args:", "[] self.experiment_runners = {} self.filesystems = [] plugin_files = load_conf_list(plugin_config('files',", "the everett ConfigManager for the plugin section of the application", "to reference this plugin runner. This is a string that", "a string that will match the command line argument used", "A plugin registry is passed to plugins in a call", "plugin_modules = load_conf_list(plugin_config('modules', default='[]')) self._load_from_modules(plugin_modules) self.plugin_modules = plugin_modules def _load_plugin(self,", "plugin_config, rv_home): \"\"\"Initializes this plugin registry. A plugin registry is", "self.default_vector_sources = [] self.default_label_sources = [] self.default_label_stores = [] self.default_evaluators", "of AuxCommand subclass to register. \"\"\" if command_type in self.command_config_builders:", "set(self.plugin_modules)) self._load_from_modules(new_plugin_modules) self.plugin_modules.extend(new_plugin_modules) def to_proto(self): \"\"\"Returns a protobuf message that", "work for multi-value lists. \"\"\" try: # A comma separated", "'\"register_plugin\" method.'.format(identifier)) register_method = getattr(plugin, 'register_plugin') if not callable(register_method): raise", "a VectorSourceDefaultProvider for use as a plugin.\"\"\" self.default_vector_sources.append(provider_class) def register_default_label_source(self,", "def load_conf_list(s): \"\"\"Loads a list of items from the config.", "to reference this runner; e.g. 
if the key is \"FOO_RUNNER\",", "self._load_plugin(plugin_source.load_plugin(plugin_name), uri) def _load_from_modules(self, plugin_modules): if not plugin_modules: return for", "a protobuf message that records the plugin sources for plugins", "use as a plugin.\"\"\" self.default_vector_sources.append(provider_class) def register_default_label_source(self, provider_class): \"\"\"Registers a", "the config. Lists should be comma separated. This takes into", "uri) def _load_from_modules(self, plugin_modules): if not plugin_modules: return for module", "= command_class if command_class.options.include_by_default: self.commands.append(command_type) def register_default_raster_source(self, provider_class): \"\"\"Registers a", "- the everett ConfigManager for the plugin section of the", "a plugin.\"\"\" self.default_label_sources.append(provider_class) def register_default_label_store(self, provider_class): \"\"\"Registers a LabelStoreDefaultProvider for", "def get_instance(): return rv._registry._get_plugin_registry() def __init__(self, plugin_config, rv_home): \"\"\"Initializes this", "config. Lists should be comma separated. This takes into account", "def register_default_label_store(self, provider_class): \"\"\"Registers a LabelStoreDefaultProvider for use as a", "Raster Vision allowed for a `[ \"module\" ]` like syntax,", "= os.path.splitext(os.path.basename(uri))[0] plugin_path = os.path.join(self.plugin_root_dir, plugin_name) fs = rv._registry.get_file_system(uri, search_plugins=False)", "Args: command_type - The key used for this plugin. This", "users can use the runner by issuing a \"rastervision run", "as a plugin. Args: runner_key - The key used to", "attribute, ' 'but it is not callable'.format(identifier)) # TODO: Log", "command_type, command_class): \"\"\"Registers a custom AuxCommand as a plugin. 
Args:", "plugin registry is passed to plugins in a call to", "class PluginRegistry: @staticmethod def get_instance(): return rv._registry._get_plugin_registry() def __init__(self, plugin_config,", "local_path = download_if_needed(uri, plugin_path, fs=fs) local_dir = os.path.dirname(local_path) plugin_source =", "have ' '\"register_plugin\" method.'.format(identifier)) register_method = getattr(plugin, 'register_plugin') if not", "quotes with double quotes lets us parse it as a", "Args: plugin_config - the everett ConfigManager for the plugin section", "a \".builder(key)\" call. command_class - The subclass of AuxCommand subclass", "self.aux_command_classes: raise PluginError('AuxCommand is already registered for command' 'with type", "runner_key, runner_class): \"\"\"Registers an ExperimentRunner as a plugin. Args: runner_key", "raise PluginError('Plugin at {} does not have ' '\"register_plugin\" method.'.format(identifier))", "provider_class): \"\"\"Registers a LabelSourceDefaultProvider for use as a plugin.\"\"\" self.default_label_sources.append(provider_class)", "is not callable'.format(identifier)) # TODO: Log loading plugin. register_method(self) def", "\".builder(key)\" call. builder_class - The subclass of CommandConfigBuilder that builds", "subclass to register. \"\"\" if command_type in self.command_config_builders: raise PluginError(", "for use as a plugin.\"\"\" self.default_label_stores.append(provider_class) def register_default_evaluator(self, provider_class): \"\"\"Registers", "plugin. \"\"\" if command_type in self.command_config_builders: raise PluginError( 'CommandConfigBuilder already", "the ExperimentRunner plugin. 
\"\"\" if runner_key in self.experiment_runners: raise PluginError('ExperimentRunner", "self.plugin_files = plugin_files plugin_modules = load_conf_list(plugin_config('modules', default='[]')) self._load_from_modules(plugin_modules) self.plugin_modules =", "of Raster Vision allowed for a `[ \"module\" ]` like", "builder_class): \"\"\"Registers a ConfigBuilder as a plugin. Args: command_type -", "{} self.filesystems = [] plugin_files = load_conf_list(plugin_config('files', default='[]')) self._load_from_files(plugin_files) self.plugin_files", "plugin is valid if not hasattr(plugin, 'register_plugin'): raise PluginError('Plugin at", "list( set(plugin_msg.plugin_uris) - set(self.plugin_files)) self._load_from_files(new_plugin_files) self.plugin_files.extend(new_plugin_files) new_plugin_modules = list( set(plugin_msg.plugin_modules)", "set(plugin_msg.plugin_modules) - set(self.plugin_modules)) self._load_from_modules(new_plugin_modules) self.plugin_modules.extend(new_plugin_modules) def to_proto(self): \"\"\"Returns a protobuf", "_load_from_modules(self, plugin_modules): if not plugin_modules: return for module in plugin_modules:", "a call to their \"register_plugin\" method. Args: plugin_config - the", "used to construct the builder in a \".builder(key)\" call. command_class", "group ' '{} and key {}'.format(group, key)) self.config_builders[(group, key)] =", "provider_class): \"\"\"Registers an EvaluatorDefaultProvider for use as a plugin.\"\"\" self.default_evaluators.append(provider_class)", "\"FOO_RUNNER\", then users can use the runner by issuing a", "The key used to reference this plugin runner. This is", "of the application configuration. \"\"\" self.plugin_root_dir = os.path.join(rv_home, 'plugins') self.config_builders", "list of items from the config. Lists should be comma", "registered for command' 'with type {}'.format(command_type)) self.command_config_builders[command_type] = builder_class self.commands.append(command_type)", "e.g. rv.BACKEND, rv.TASK. 
key - The key used for this", "'with type {}'.format(command_type)) self.command_config_builders[command_type] = builder_class self.commands.append(command_type) def register_aux_command(self, command_type,", "LabelSourceDefaultProvider for use as a plugin.\"\"\" self.default_label_sources.append(provider_class) def register_default_label_store(self, provider_class):", "plugin_modules: plugin = importlib.import_module(module) self._load_plugin(plugin, module) def add_plugins_from_proto(self, plugin_msg): new_plugin_files", "\"register_plugin\" method. Args: plugin_config - the everett ConfigManager for the", "from pluginbase import PluginBase import rastervision as rv from rastervision.protos.plugin_pb2", "type {}'.format(command_type)) self.command_config_builders[command_type] = builder_class self.commands.append(command_type) def register_aux_command(self, command_type, command_class):", "separated list of values will be transformed to # having", "plugin_source = plugin_base.make_plugin_source( searchpath=[local_dir]) # We're required to hang onto", "it from getting GC'd. self.plugin_sources.append(plugin_source) self._load_plugin(plugin_source.load_plugin(plugin_name), uri) def _load_from_modules(self, plugin_modules):", "the source # to keep it from getting GC'd. self.plugin_sources.append(plugin_source)", "a JSON list. return json.loads(s.replace(\"'\", '\"')) except json.JSONDecodeError: return list(map(lambda", "subclass of ConfigBuilder that builds the Config for this plugin.", "a custom AuxCommand as a plugin. Args: command_type - The", "def register_default_label_source(self, provider_class): \"\"\"Registers a LabelSourceDefaultProvider for use as a", "as PluginConfigMsg from rastervision.utils.files import download_if_needed class PluginError(Exception): pass def", "rv.TASK. key - The key used for this plugin. This", "list(map(lambda x: x.strip(), s.split(','))) class PluginRegistry: @staticmethod def get_instance(): return", "\". 
Replacing # single quotes with double quotes lets us", "' 'key {}'.format(runner_key)) self.experiment_runners[runner_key] = runner_class def register_filesystem(self, filesystem_class): \"\"\"Registers", "type {}'.format(command_type)) if command_type in self.aux_command_classes: raise PluginError('AuxCommand is already", "the Config for this plugin. \"\"\" if (group, key) in", "reference this plugin runner. This is a string that will", "[] self.default_label_sources = [] self.default_label_stores = [] self.default_evaluators = []", "protobuf message that records the plugin sources for plugins that", "loaded in the registry. \"\"\" return PluginConfigMsg( plugin_uris=self.plugin_files, plugin_modules=self.plugin_modules) def", "registered for group ' '{} and key {}'.format(group, key)) self.config_builders[(group,", "plugin_files = load_conf_list(plugin_config('files', default='[]')) self._load_from_files(plugin_files) self.plugin_files = plugin_files plugin_modules =", "takes into account that previous versions of Raster Vision allowed", "ConfigBuilder as a plugin. Args: command_type - The key used", "a plugin. 
Args: command_type - The key used for this", "in plugin_modules: plugin = importlib.import_module(module) self._load_plugin(plugin, module) def add_plugins_from_proto(self, plugin_msg):", "plugin = importlib.import_module(module) self._load_plugin(plugin, module) def add_plugins_from_proto(self, plugin_msg): new_plugin_files =", "be used to construct the builder in a \".builder(key)\" call.", "= [] self.default_evaluators = [] self.experiment_runners = {} self.filesystems =", "plugin_paths: plugin_name = os.path.splitext(os.path.basename(uri))[0] plugin_path = os.path.join(self.plugin_root_dir, plugin_name) fs =", "- set(self.plugin_files)) self._load_from_files(new_plugin_files) self.plugin_files.extend(new_plugin_files) new_plugin_modules = list( set(plugin_msg.plugin_modules) - set(self.plugin_modules))", "the plugin sources for plugins that are currently loaded in", "to keep it from getting GC'd. self.plugin_sources.append(plugin_source) self._load_plugin(plugin_source.load_plugin(plugin_name), uri) def", "group - The Config group, e.g. rv.BACKEND, rv.TASK. key -", "x.strip(), s.split(','))) class PluginRegistry: @staticmethod def get_instance(): return rv._registry._get_plugin_registry() def", "builder_class self.commands.append(command_type) def register_aux_command(self, command_type, command_class): \"\"\"Registers a custom AuxCommand", "\"\"\"Registers a RasterSourceDefaultProvider for use as a plugin.\"\"\" self.default_raster_sources.append(provider_class) def", "by issuing a \"rastervision run foo_runner ...\" command. runner_class -", "an EvaluatorDefaultProvider for use as a plugin.\"\"\" self.default_evaluators.append(provider_class) def register_experiment_runner(self,", "The class of the ExperimentRunner plugin. \"\"\" if runner_key in", "\"\"\"Registers a LabelStoreDefaultProvider for use as a plugin.\"\"\" self.default_label_stores.append(provider_class) def", "subclass of AuxCommand subclass to register. 
\"\"\" if command_type in", "runner_key - The key used to reference this plugin runner.", "import rastervision as rv from rastervision.protos.plugin_pb2 import PluginConfig as PluginConfigMsg", "command line argument used to reference this runner; e.g. if", "the command line argument used to reference this runner; e.g.", "plugin_modules def _load_plugin(self, plugin, identifier): # Check the plugin is", "identifier): # Check the plugin is valid if not hasattr(plugin,", "self._load_from_modules(plugin_modules) self.plugin_modules = plugin_modules def _load_plugin(self, plugin, identifier): # Check", "# Check the plugin is valid if not hasattr(plugin, 'register_plugin'):", "construct the builder in a \".builder(key)\" call. command_class - The", "to construct the builder in a \".builder(key)\" call. builder_class -", "\"\"\"Registers a custom AuxCommand as a plugin. Args: command_type -", "- The Config group, e.g. rv.BACKEND, rv.TASK. key - The", "- The class of the ExperimentRunner plugin. \"\"\" if runner_key", "os.path.join(self.plugin_root_dir, plugin_name) fs = rv._registry.get_file_system(uri, search_plugins=False) local_path = download_if_needed(uri, plugin_path,", "self.experiment_runners: raise PluginError('ExperimentRunner already registered for ' 'key {}'.format(runner_key)) self.experiment_runners[runner_key]", "that are currently loaded in the registry. \"\"\" return PluginConfigMsg(", "for this plugin. \"\"\" if command_type in self.command_config_builders: raise PluginError(", "for plugins that are currently loaded in the registry. \"\"\"", "from getting GC'd. 
self.plugin_sources.append(plugin_source) self._load_plugin(plugin_source.load_plugin(plugin_name), uri) def _load_from_modules(self, plugin_modules): if", "if command_class.options.include_by_default: self.commands.append(command_type) def register_default_raster_source(self, provider_class): \"\"\"Registers a RasterSourceDefaultProvider for", "self.plugin_modules = plugin_modules def _load_plugin(self, plugin, identifier): # Check the", "The Config group, e.g. rv.BACKEND, rv.TASK. key - The key", "self.default_label_stores = [] self.default_evaluators = [] self.experiment_runners = {} self.filesystems", "Lists should be comma separated. This takes into account that", "of values will be transformed to # having a list-like", "it is not callable'.format(identifier)) # TODO: Log loading plugin. register_method(self)", "rv.BACKEND, rv.TASK. key - The key used for this plugin.", "passed to plugins in a call to their \"register_plugin\" method.", "RasterSourceDefaultProvider for use as a plugin.\"\"\" self.default_raster_sources.append(provider_class) def register_default_vector_source(self, provider_class):", "self.config_builders = {} self.command_config_builders = {} self.commands = [] self.aux_command_classes", "plugin_uris=self.plugin_files, plugin_modules=self.plugin_modules) def register_config_builder(self, group, key, builder_class): \"\"\"Registers a ConfigBuilder", "CommandConfig for this plugin. \"\"\" if command_type in self.command_config_builders: raise", "not hasattr(plugin, 'register_plugin'): raise PluginError('Plugin at {} does not have", "from the config. Lists should be comma separated. 
This takes", "key) in self.config_builders: raise PluginError('ConfigBuilder already registered for group '", "if (group, key) in self.config_builders: raise PluginError('ConfigBuilder already registered for", "for use as a plugin.\"\"\" self.default_vector_sources.append(provider_class) def register_default_label_source(self, provider_class): \"\"\"Registers", "multi-value lists. \"\"\" try: # A comma separated list of", "in the registry. \"\"\" return PluginConfigMsg( plugin_uris=self.plugin_files, plugin_modules=self.plugin_modules) def register_config_builder(self,", "self.default_vector_sources.append(provider_class) def register_default_label_source(self, provider_class): \"\"\"Registers a LabelSourceDefaultProvider for use as", "if command_type in self.command_config_builders: raise PluginError( 'CommandConfigBuilder already registered for", "valid if not hasattr(plugin, 'register_plugin'): raise PluginError('Plugin at {} does", "'\"register_plugin\" attribute, ' 'but it is not callable'.format(identifier)) # TODO:", "default='[]')) self._load_from_files(plugin_files) self.plugin_files = plugin_files plugin_modules = load_conf_list(plugin_config('modules', default='[]')) self._load_from_modules(plugin_modules)", "if not plugin_paths: return self.plugin_sources = [] plugin_base = PluginBase(package='rastervision.plugins')", "# to keep it from getting GC'd. 
self.plugin_sources.append(plugin_source) self._load_plugin(plugin_source.load_plugin(plugin_name), uri)", "for group ' '{} and key {}'.format(group, key)) self.config_builders[(group, key)]", "Replacing # single quotes with double quotes lets us parse", "a plugin.\"\"\" self.default_vector_sources.append(provider_class) def register_default_label_source(self, provider_class): \"\"\"Registers a LabelSourceDefaultProvider for", "for use as a plugin.\"\"\" self.default_raster_sources.append(provider_class) def register_default_vector_source(self, provider_class): \"\"\"Registers", "list( set(plugin_msg.plugin_modules) - set(self.plugin_modules)) self._load_from_modules(new_plugin_modules) self.plugin_modules.extend(new_plugin_modules) def to_proto(self): \"\"\"Returns a", "plugin. Args: command_type - The key used for this plugin.", "of ConfigBuilder that builds the Config for this plugin. \"\"\"", "at {} has a ' '\"register_plugin\" attribute, ' 'but it", "searchpath=[local_dir]) # We're required to hang onto the source #", "search_plugins=False) local_path = download_if_needed(uri, plugin_path, fs=fs) local_dir = os.path.dirname(local_path) plugin_source", "- The subclass of ConfigBuilder that builds the Config for", "for this plugin. 
\"\"\" if (group, key) in self.config_builders: raise", "{}'.format(command_type)) self.command_config_builders[command_type] = builder_class self.commands.append(command_type) def register_aux_command(self, command_type, command_class): \"\"\"Registers", "return list(map(lambda x: x.strip(), s.split(','))) class PluginRegistry: @staticmethod def get_instance():", "self.filesystems = [] plugin_files = load_conf_list(plugin_config('files', default='[]')) self._load_from_files(plugin_files) self.plugin_files =", "register_default_label_source(self, provider_class): \"\"\"Registers a LabelSourceDefaultProvider for use as a plugin.\"\"\"", "{} self.command_config_builders = {} self.commands = [] self.aux_command_classes = {}", "PluginConfigMsg from rastervision.utils.files import download_if_needed class PluginError(Exception): pass def load_conf_list(s):", "import json import importlib from pluginbase import PluginBase import rastervision", "the registry. \"\"\" return PluginConfigMsg( plugin_uris=self.plugin_files, plugin_modules=self.plugin_modules) def register_config_builder(self, group,", "rv_home): \"\"\"Initializes this plugin registry. 
A plugin registry is passed", "already registered for command' 'with type {}'.format(command_type)) self.command_config_builders[command_type] = builder_class", "for command' 'with type {}'.format(command_type)) self.aux_command_classes[command_type] = command_class if command_class.options.include_by_default:", "is valid if not hasattr(plugin, 'register_plugin'): raise PluginError('Plugin at {}", "self._load_plugin(plugin, module) def add_plugins_from_proto(self, plugin_msg): new_plugin_files = list( set(plugin_msg.plugin_uris) -", "'CommandConfigBuilder is already registered for command' 'with type {}'.format(command_type)) if", "into account that previous versions of Raster Vision allowed for", "def register_command_config_builder(self, command_type, builder_class): \"\"\"Registers a ConfigBuilder as a plugin.", "loading plugin. register_method(self) def _load_from_files(self, plugin_paths): if not plugin_paths: return", "json.JSONDecodeError: return list(map(lambda x: x.strip(), s.split(','))) class PluginRegistry: @staticmethod def", "= [] self.experiment_runners = {} self.filesystems = [] plugin_files =", "everett ConfigManager for the plugin section of the application configuration.", "= os.path.join(rv_home, 'plugins') self.config_builders = {} self.command_config_builders = {} self.commands", "list. return json.loads(s.replace(\"'\", '\"')) except json.JSONDecodeError: return list(map(lambda x: x.strip(),", "key {}'.format(group, key)) self.config_builders[(group, key)] = builder_class def register_command_config_builder(self, command_type,", "it as a JSON list. 
return json.loads(s.replace(\"'\", '\"')) except json.JSONDecodeError:", "versions of Raster Vision allowed for a `[ \"module\" ]`", "os.path.splitext(os.path.basename(uri))[0] plugin_path = os.path.join(self.plugin_root_dir, plugin_name) fs = rv._registry.get_file_system(uri, search_plugins=False) local_path", "is \"FOO_RUNNER\", then users can use the runner by issuing", "register_config_builder(self, group, key, builder_class): \"\"\"Registers a ConfigBuilder as a plugin.", "The subclass of AuxCommand subclass to register. \"\"\" if command_type", "the builder in a \".builder(key)\" call. command_class - The subclass", "\"\"\" if command_type in self.command_config_builders: raise PluginError( 'CommandConfigBuilder is already", "required to hang onto the source # to keep it", "self.default_label_sources.append(provider_class) def register_default_label_store(self, provider_class): \"\"\"Registers a LabelStoreDefaultProvider for use as", "plugin_path = os.path.join(self.plugin_root_dir, plugin_name) fs = rv._registry.get_file_system(uri, search_plugins=False) local_path =", "PluginError( 'CommandConfigBuilder already registered for command' 'with type {}'.format(command_type)) self.command_config_builders[command_type]", "used to reference this plugin runner. This is a string", "self.default_evaluators = [] self.experiment_runners = {} self.filesystems = [] plugin_files", "type {}'.format(command_type)) self.aux_command_classes[command_type] = command_class if command_class.options.include_by_default: self.commands.append(command_type) def register_default_raster_source(self,", "\".builder(key)\" call. command_class - The subclass of AuxCommand subclass to", "self.command_config_builders: raise PluginError( 'CommandConfigBuilder is already registered for command' 'with", "TODO: Log loading plugin. 
register_method(self) def _load_from_files(self, plugin_paths): if not", "A comma separated list of values will be transformed to", "return for module in plugin_modules: plugin = importlib.import_module(module) self._load_plugin(plugin, module)", "from rastervision.utils.files import download_if_needed class PluginError(Exception): pass def load_conf_list(s): \"\"\"Loads", "items from the config. Lists should be comma separated. This", "self.plugin_modules.extend(new_plugin_modules) def to_proto(self): \"\"\"Returns a protobuf message that records the", "their \"register_plugin\" method. Args: plugin_config - the everett ConfigManager for", "importlib from pluginbase import PluginBase import rastervision as rv from", "\"\"\" if runner_key in self.experiment_runners: raise PluginError('ExperimentRunner already registered for", "\"\"\"Initializes this plugin registry. A plugin registry is passed to", "configuration. \"\"\" self.plugin_root_dir = os.path.join(rv_home, 'plugins') self.config_builders = {} self.command_config_builders", "The subclass of ConfigBuilder that builds the Config for this", "should be comma separated. This takes into account that previous", "list-like string, with ' instead of \". Replacing # single", "@staticmethod def get_instance(): return rv._registry._get_plugin_registry() def __init__(self, plugin_config, rv_home): \"\"\"Initializes", "comma separated. This takes into account that previous versions of", "= PluginBase(package='rastervision.plugins') for uri in plugin_paths: plugin_name = os.path.splitext(os.path.basename(uri))[0] plugin_path", "The key used for this plugin. This will be used", "plugin. 
This will be used to construct the builder in", "in self.command_config_builders: raise PluginError( 'CommandConfigBuilder already registered for command' 'with", "self.aux_command_classes = {} self.default_raster_sources = [] self.default_vector_sources = [] self.default_label_sources", "[] self.default_evaluators = [] self.experiment_runners = {} self.filesystems = []", "= load_conf_list(plugin_config('modules', default='[]')) self._load_from_modules(plugin_modules) self.plugin_modules = plugin_modules def _load_plugin(self, plugin,", "not have ' '\"register_plugin\" method.'.format(identifier)) register_method = getattr(plugin, 'register_plugin') if", "- The key used to reference this plugin runner. This", "run foo_runner ...\" command. runner_class - The class of the", "command_type - The key used for this plugin. This will", "call. builder_class - The subclass of CommandConfigBuilder that builds the", "The subclass of CommandConfigBuilder that builds the CommandConfig for this", "This will be used to construct the builder in a", "not callable(register_method): raise PluginError('Plugin at {} has a ' '\"register_plugin\"", "\"\"\"Registers a ConfigBuilder as a plugin. Args: group - The", "= builder_class def register_command_config_builder(self, command_type, builder_class): \"\"\"Registers a ConfigBuilder as", "as a plugin. Args: command_type - The key used for", "def register_default_vector_source(self, provider_class): \"\"\"Registers a VectorSourceDefaultProvider for use as a", "raise PluginError('ConfigBuilder already registered for group ' '{} and key", "group, e.g. rv.BACKEND, rv.TASK. key - The key used for", "plugins that are currently loaded in the registry. \"\"\" return", "to register. \"\"\" if command_type in self.command_config_builders: raise PluginError( 'CommandConfigBuilder", "Log loading plugin. 
register_method(self) def _load_from_files(self, plugin_paths): if not plugin_paths:", "This takes into account that previous versions of Raster Vision", "= builder_class self.commands.append(command_type) def register_aux_command(self, command_type, command_class): \"\"\"Registers a custom", "register_default_raster_source(self, provider_class): \"\"\"Registers a RasterSourceDefaultProvider for use as a plugin.\"\"\"", "= load_conf_list(plugin_config('files', default='[]')) self._load_from_files(plugin_files) self.plugin_files = plugin_files plugin_modules = load_conf_list(plugin_config('modules',", "command_class - The subclass of AuxCommand subclass to register. \"\"\"", "of items from the config. Lists should be comma separated.", "'{} and key {}'.format(group, key)) self.config_builders[(group, key)] = builder_class def", "hang onto the source # to keep it from getting", "callable'.format(identifier)) # TODO: Log loading plugin. register_method(self) def _load_from_files(self, plugin_paths):", "key is \"FOO_RUNNER\", then users can use the runner by", "registered for command' 'with type {}'.format(command_type)) if command_type in self.aux_command_classes:", "pluginbase import PluginBase import rastervision as rv from rastervision.protos.plugin_pb2 import", "a plugin.\"\"\" self.default_raster_sources.append(provider_class) def register_default_vector_source(self, provider_class): \"\"\"Registers a VectorSourceDefaultProvider for", "module in plugin_modules: plugin = importlib.import_module(module) self._load_plugin(plugin, module) def add_plugins_from_proto(self,", "import download_if_needed class PluginError(Exception): pass def load_conf_list(s): \"\"\"Loads a list", "# having a list-like string, with ' instead of \".", "{}'.format(command_type)) self.aux_command_classes[command_type] = command_class if command_class.options.include_by_default: self.commands.append(command_type) def register_default_raster_source(self, provider_class):", 
"self.commands.append(command_type) def register_default_raster_source(self, provider_class): \"\"\"Registers a RasterSourceDefaultProvider for use as", "an ExperimentRunner as a plugin. Args: runner_key - The key", "use as a plugin.\"\"\" self.default_evaluators.append(provider_class) def register_experiment_runner(self, runner_key, runner_class): \"\"\"Registers", "def to_proto(self): \"\"\"Returns a protobuf message that records the plugin", "a \".builder(key)\" call. builder_class - The subclass of ConfigBuilder that", "plugin.\"\"\" self.default_label_stores.append(provider_class) def register_default_evaluator(self, provider_class): \"\"\"Registers an EvaluatorDefaultProvider for use", "didn't work for multi-value lists. \"\"\" try: # A comma", "except json.JSONDecodeError: return list(map(lambda x: x.strip(), s.split(','))) class PluginRegistry: @staticmethod", "self.command_config_builders = {} self.commands = [] self.aux_command_classes = {} self.default_raster_sources", "\"\"\" try: # A comma separated list of values will", "raise PluginError( 'CommandConfigBuilder is already registered for command' 'with type", "registered for ' 'key {}'.format(runner_key)) self.experiment_runners[runner_key] = runner_class def register_filesystem(self,", "a list-like string, with ' instead of \". 
Replacing #", "'\"')) except json.JSONDecodeError: return list(map(lambda x: x.strip(), s.split(','))) class PluginRegistry:", "self.experiment_runners[runner_key] = runner_class def register_filesystem(self, filesystem_class): \"\"\"Registers a FileSystem as", "def _load_plugin(self, plugin, identifier): # Check the plugin is valid", "def register_filesystem(self, filesystem_class): \"\"\"Registers a FileSystem as a plugin.\"\"\" self.filesystems.append(filesystem_class)", "that previous versions of Raster Vision allowed for a `[", "self.command_config_builders[command_type] = builder_class self.commands.append(command_type) def register_aux_command(self, command_type, command_class): \"\"\"Registers a", "' '\"register_plugin\" method.'.format(identifier)) register_method = getattr(plugin, 'register_plugin') if not callable(register_method):", "self.default_label_sources = [] self.default_label_stores = [] self.default_evaluators = [] self.experiment_runners", "already registered for group ' '{} and key {}'.format(group, key))", "command_class): \"\"\"Registers a custom AuxCommand as a plugin. Args: command_type", "JSON list. return json.loads(s.replace(\"'\", '\"')) except json.JSONDecodeError: return list(map(lambda x:", "{} does not have ' '\"register_plugin\" method.'.format(identifier)) register_method = getattr(plugin,", "plugins in a call to their \"register_plugin\" method. Args: plugin_config", "provider_class): \"\"\"Registers a LabelStoreDefaultProvider for use as a plugin.\"\"\" self.default_label_stores.append(provider_class)", "PluginError('Plugin at {} does not have ' '\"register_plugin\" method.'.format(identifier)) register_method", "the application configuration. 
\"\"\" self.plugin_root_dir = os.path.join(rv_home, 'plugins') self.config_builders =", "rastervision.utils.files import download_if_needed class PluginError(Exception): pass def load_conf_list(s): \"\"\"Loads a", "to hang onto the source # to keep it from", "raise PluginError( 'CommandConfigBuilder already registered for command' 'with type {}'.format(command_type))", "load_conf_list(s): \"\"\"Loads a list of items from the config. Lists", "for the plugin section of the application configuration. \"\"\" self.plugin_root_dir", "_load_plugin(self, plugin, identifier): # Check the plugin is valid if", "ConfigBuilder that builds the Config for this plugin. \"\"\" if", "registry is passed to plugins in a call to their", "the builder in a \".builder(key)\" call. builder_class - The subclass", "use as a plugin.\"\"\" self.default_label_stores.append(provider_class) def register_default_evaluator(self, provider_class): \"\"\"Registers an", "registered for command' 'with type {}'.format(command_type)) self.aux_command_classes[command_type] = command_class if", "are currently loaded in the registry. \"\"\" return PluginConfigMsg( plugin_uris=self.plugin_files,", "(group, key) in self.config_builders: raise PluginError('ConfigBuilder already registered for group", "'register_plugin'): raise PluginError('Plugin at {} does not have ' '\"register_plugin\"", "PluginError('Plugin at {} has a ' '\"register_plugin\" attribute, ' 'but", "x: x.strip(), s.split(','))) class PluginRegistry: @staticmethod def get_instance(): return rv._registry._get_plugin_registry()", "in self.config_builders: raise PluginError('ConfigBuilder already registered for group ' '{}", "plugin_base.make_plugin_source( searchpath=[local_dir]) # We're required to hang onto the source", "rv from rastervision.protos.plugin_pb2 import PluginConfig as PluginConfigMsg from rastervision.utils.files import", "this plugin registry. A plugin registry is passed to plugins", "AuxCommand subclass to register. 
\"\"\" if command_type in self.command_config_builders: raise", "os.path.dirname(local_path) plugin_source = plugin_base.make_plugin_source( searchpath=[local_dir]) # We're required to hang", "plugin section of the application configuration. \"\"\" self.plugin_root_dir = os.path.join(rv_home,", "def register_aux_command(self, command_type, command_class): \"\"\"Registers a custom AuxCommand as a", "rastervision.protos.plugin_pb2 import PluginConfig as PluginConfigMsg from rastervision.utils.files import download_if_needed class", "command' 'with type {}'.format(command_type)) self.command_config_builders[command_type] = builder_class self.commands.append(command_type) def register_aux_command(self,", "raise PluginError('AuxCommand is already registered for command' 'with type {}'.format(command_type))", "provider_class): \"\"\"Registers a RasterSourceDefaultProvider for use as a plugin.\"\"\" self.default_raster_sources.append(provider_class)", "register_aux_command(self, command_type, command_class): \"\"\"Registers a custom AuxCommand as a plugin.", "ExperimentRunner plugin. \"\"\" if runner_key in self.experiment_runners: raise PluginError('ExperimentRunner already", "builder_class): \"\"\"Registers a ConfigBuilder as a plugin. Args: group -", "will be transformed to # having a list-like string, with", "command_class if command_class.options.include_by_default: self.commands.append(command_type) def register_default_raster_source(self, provider_class): \"\"\"Registers a RasterSourceDefaultProvider", "' '{} and key {}'.format(group, key)) self.config_builders[(group, key)] = builder_class", "raise PluginError('Plugin at {} has a ' '\"register_plugin\" attribute, '", "this plugin. 
\"\"\" if command_type in self.command_config_builders: raise PluginError( 'CommandConfigBuilder", "\"\"\" self.plugin_root_dir = os.path.join(rv_home, 'plugins') self.config_builders = {} self.command_config_builders =", "= [] self.default_label_sources = [] self.default_label_stores = [] self.default_evaluators =", "as a plugin.\"\"\" self.default_label_stores.append(provider_class) def register_default_evaluator(self, provider_class): \"\"\"Registers an EvaluatorDefaultProvider", "a ' '\"register_plugin\" attribute, ' 'but it is not callable'.format(identifier))", "PluginError('ExperimentRunner already registered for ' 'key {}'.format(runner_key)) self.experiment_runners[runner_key] = runner_class", "plugin sources for plugins that are currently loaded in the", "plugin_paths: return self.plugin_sources = [] plugin_base = PluginBase(package='rastervision.plugins') for uri", "if runner_key in self.experiment_runners: raise PluginError('ExperimentRunner already registered for '", "key used for this plugin. This will be used to", "a list of items from the config. Lists should be", "as a plugin.\"\"\" self.default_vector_sources.append(provider_class) def register_default_label_source(self, provider_class): \"\"\"Registers a LabelSourceDefaultProvider", "\"\"\"Loads a list of items from the config. 
Lists should", "plugin_msg): new_plugin_files = list( set(plugin_msg.plugin_uris) - set(self.plugin_files)) self._load_from_files(new_plugin_files) self.plugin_files.extend(new_plugin_files) new_plugin_modules", "will be used to construct the builder in a \".builder(key)\"", "def add_plugins_from_proto(self, plugin_msg): new_plugin_files = list( set(plugin_msg.plugin_uris) - set(self.plugin_files)) self._load_from_files(new_plugin_files)", "return json.loads(s.replace(\"'\", '\"')) except json.JSONDecodeError: return list(map(lambda x: x.strip(), s.split(',')))", "def register_experiment_runner(self, runner_key, runner_class): \"\"\"Registers an ExperimentRunner as a plugin.", "plugin.\"\"\" self.default_vector_sources.append(provider_class) def register_default_label_source(self, provider_class): \"\"\"Registers a LabelSourceDefaultProvider for use", "- The subclass of AuxCommand subclass to register. \"\"\" if", "sources for plugins that are currently loaded in the registry.", "command_type in self.command_config_builders: raise PluginError( 'CommandConfigBuilder already registered for command'", "in a \".builder(key)\" call. command_class - The subclass of AuxCommand", "registry. \"\"\" return PluginConfigMsg( plugin_uris=self.plugin_files, plugin_modules=self.plugin_modules) def register_config_builder(self, group, key,", "for module in plugin_modules: plugin = importlib.import_module(module) self._load_plugin(plugin, module) def", "key)) self.config_builders[(group, key)] = builder_class def register_command_config_builder(self, command_type, builder_class): \"\"\"Registers", "\"\"\"Registers a VectorSourceDefaultProvider for use as a plugin.\"\"\" self.default_vector_sources.append(provider_class) def", "plugin. 
\"\"\" if (group, key) in self.config_builders: raise PluginError('ConfigBuilder already", "{} self.commands = [] self.aux_command_classes = {} self.default_raster_sources = []", "= [] self.default_vector_sources = [] self.default_label_sources = [] self.default_label_stores =", "import PluginConfig as PluginConfigMsg from rastervision.utils.files import download_if_needed class PluginError(Exception):", "= {} self.default_raster_sources = [] self.default_vector_sources = [] self.default_label_sources =", "values will be transformed to # having a list-like string,", "= os.path.dirname(local_path) plugin_source = plugin_base.make_plugin_source( searchpath=[local_dir]) # We're required to", "pass def load_conf_list(s): \"\"\"Loads a list of items from the", "currently loaded in the registry. \"\"\" return PluginConfigMsg( plugin_uris=self.plugin_files, plugin_modules=self.plugin_modules)", "us parse it as a JSON list. return json.loads(s.replace(\"'\", '\"'))", "a plugin. Args: group - The Config group, e.g. rv.BACKEND,", "string, with ' instead of \". Replacing # single quotes", "of the ExperimentRunner plugin. \"\"\" if runner_key in self.experiment_runners: raise", "lists. \"\"\" try: # A comma separated list of values", "self.plugin_sources.append(plugin_source) self._load_plugin(plugin_source.load_plugin(plugin_name), uri) def _load_from_modules(self, plugin_modules): if not plugin_modules: return", "'plugins') self.config_builders = {} self.command_config_builders = {} self.commands = []", "call to their \"register_plugin\" method. Args: plugin_config - the everett", "def register_default_evaluator(self, provider_class): \"\"\"Registers an EvaluatorDefaultProvider for use as a", "{}'.format(group, key)) self.config_builders[(group, key)] = builder_class def register_command_config_builder(self, command_type, builder_class):", "rastervision as rv from rastervision.protos.plugin_pb2 import PluginConfig as PluginConfigMsg from", "the CommandConfig for this plugin. 
\"\"\" if command_type in self.command_config_builders:", "s.split(','))) class PluginRegistry: @staticmethod def get_instance(): return rv._registry._get_plugin_registry() def __init__(self,", "runner; e.g. if the key is \"FOO_RUNNER\", then users can", "is passed to plugins in a call to their \"register_plugin\"", "self._load_from_files(new_plugin_files) self.plugin_files.extend(new_plugin_files) new_plugin_modules = list( set(plugin_msg.plugin_modules) - set(self.plugin_modules)) self._load_from_modules(new_plugin_modules) self.plugin_modules.extend(new_plugin_modules)", "plugin_files plugin_modules = load_conf_list(plugin_config('modules', default='[]')) self._load_from_modules(plugin_modules) self.plugin_modules = plugin_modules def", "for uri in plugin_paths: plugin_name = os.path.splitext(os.path.basename(uri))[0] plugin_path = os.path.join(self.plugin_root_dir,", "use as a plugin.\"\"\" self.default_label_sources.append(provider_class) def register_default_label_store(self, provider_class): \"\"\"Registers a", "class of the ExperimentRunner plugin. \"\"\" if runner_key in self.experiment_runners:", "PluginError(Exception): pass def load_conf_list(s): \"\"\"Loads a list of items from", "that builds the CommandConfig for this plugin. \"\"\" if command_type", "'with type {}'.format(command_type)) self.aux_command_classes[command_type] = command_class if command_class.options.include_by_default: self.commands.append(command_type) def", "keep it from getting GC'd. self.plugin_sources.append(plugin_source) self._load_plugin(plugin_source.load_plugin(plugin_name), uri) def _load_from_modules(self,", "def _load_from_files(self, plugin_paths): if not plugin_paths: return self.plugin_sources = []", "= plugin_modules def _load_plugin(self, plugin, identifier): # Check the plugin", "[] self.default_vector_sources = [] self.default_label_sources = [] self.default_label_stores = []", "section of the application configuration. 
\"\"\" self.plugin_root_dir = os.path.join(rv_home, 'plugins')", "of CommandConfigBuilder that builds the CommandConfig for this plugin. \"\"\"", "os.path.join(rv_home, 'plugins') self.config_builders = {} self.command_config_builders = {} self.commands =", "CommandConfigBuilder that builds the CommandConfig for this plugin. \"\"\" if", "json.loads(s.replace(\"'\", '\"')) except json.JSONDecodeError: return list(map(lambda x: x.strip(), s.split(','))) class", "ConfigBuilder as a plugin. Args: group - The Config group,", "\"\"\"Registers an EvaluatorDefaultProvider for use as a plugin.\"\"\" self.default_evaluators.append(provider_class) def", "'key {}'.format(runner_key)) self.experiment_runners[runner_key] = runner_class def register_filesystem(self, filesystem_class): \"\"\"Registers a", "self.default_evaluators.append(provider_class) def register_experiment_runner(self, runner_key, runner_class): \"\"\"Registers an ExperimentRunner as a", "\"\"\"Returns a protobuf message that records the plugin sources for", "plugin_base = PluginBase(package='rastervision.plugins') for uri in plugin_paths: plugin_name = os.path.splitext(os.path.basename(uri))[0]", "onto the source # to keep it from getting GC'd.", "builder in a \".builder(key)\" call. builder_class - The subclass of", "comma separated list of values will be transformed to #", "use as a plugin.\"\"\" self.default_raster_sources.append(provider_class) def register_default_vector_source(self, provider_class): \"\"\"Registers a", "This is a string that will match the command line", "not callable'.format(identifier)) # TODO: Log loading plugin. register_method(self) def _load_from_files(self,", "used to construct the builder in a \".builder(key)\" call. builder_class", "plugin. 
\"\"\" if runner_key in self.experiment_runners: raise PluginError('ExperimentRunner already registered", "= importlib.import_module(module) self._load_plugin(plugin, module) def add_plugins_from_proto(self, plugin_msg): new_plugin_files = list(", "the runner by issuing a \"rastervision run foo_runner ...\" command.", "to # having a list-like string, with ' instead of", "for command' 'with type {}'.format(command_type)) if command_type in self.aux_command_classes: raise", "does not have ' '\"register_plugin\" method.'.format(identifier)) register_method = getattr(plugin, 'register_plugin')", "construct the builder in a \".builder(key)\" call. builder_class - The", "Args: runner_key - The key used to reference this plugin", "the plugin section of the application configuration. \"\"\" self.plugin_root_dir =", "importlib.import_module(module) self._load_plugin(plugin, module) def add_plugins_from_proto(self, plugin_msg): new_plugin_files = list( set(plugin_msg.plugin_uris)", "self.config_builders: raise PluginError('ConfigBuilder already registered for group ' '{} and", "'with type {}'.format(command_type)) if command_type in self.aux_command_classes: raise PluginError('AuxCommand is", "is a string that will match the command line argument", "runner_class): \"\"\"Registers an ExperimentRunner as a plugin. Args: runner_key -", "for this plugin. This will be used to construct the", "account that previous versions of Raster Vision allowed for a", "]` like syntax, even though that didn't work for multi-value", "PluginBase(package='rastervision.plugins') for uri in plugin_paths: plugin_name = os.path.splitext(os.path.basename(uri))[0] plugin_path =", "from rastervision.protos.plugin_pb2 import PluginConfig as PluginConfigMsg from rastervision.utils.files import download_if_needed", "in a call to their \"register_plugin\" method. Args: plugin_config -", "a \".builder(key)\" call. 
builder_class - The subclass of CommandConfigBuilder that", "though that didn't work for multi-value lists. \"\"\" try: #", "\"\"\"Registers a LabelSourceDefaultProvider for use as a plugin.\"\"\" self.default_label_sources.append(provider_class) def", "already registered for ' 'key {}'.format(runner_key)) self.experiment_runners[runner_key] = runner_class def", "already registered for command' 'with type {}'.format(command_type)) if command_type in", "runner_key in self.experiment_runners: raise PluginError('ExperimentRunner already registered for ' 'key", "- set(self.plugin_modules)) self._load_from_modules(new_plugin_modules) self.plugin_modules.extend(new_plugin_modules) def to_proto(self): \"\"\"Returns a protobuf message", "that records the plugin sources for plugins that are currently", "= rv._registry.get_file_system(uri, search_plugins=False) local_path = download_if_needed(uri, plugin_path, fs=fs) local_dir =", "GC'd. self.plugin_sources.append(plugin_source) self._load_plugin(plugin_source.load_plugin(plugin_name), uri) def _load_from_modules(self, plugin_modules): if not plugin_modules:", "plugin. register_method(self) def _load_from_files(self, plugin_paths): if not plugin_paths: return self.plugin_sources", "even though that didn't work for multi-value lists. \"\"\" try:", "that builds the Config for this plugin. \"\"\" if (group,", "self.plugin_sources = [] plugin_base = PluginBase(package='rastervision.plugins') for uri in plugin_paths:", "plugin_name) fs = rv._registry.get_file_system(uri, search_plugins=False) local_path = download_if_needed(uri, plugin_path, fs=fs)", "set(plugin_msg.plugin_uris) - set(self.plugin_files)) self._load_from_files(new_plugin_files) self.plugin_files.extend(new_plugin_files) new_plugin_modules = list( set(plugin_msg.plugin_modules) -", "method. Args: plugin_config - the everett ConfigManager for the plugin", "\"\"\"Registers a ConfigBuilder as a plugin. 
Args: command_type - The", "runner_class - The class of the ExperimentRunner plugin. \"\"\" if", "def __init__(self, plugin_config, rv_home): \"\"\"Initializes this plugin registry. A plugin", "plugin, identifier): # Check the plugin is valid if not", "as a plugin. Args: group - The Config group, e.g.", "# A comma separated list of values will be transformed", "plugin_paths): if not plugin_paths: return self.plugin_sources = [] plugin_base =", "local_dir = os.path.dirname(local_path) plugin_source = plugin_base.make_plugin_source( searchpath=[local_dir]) # We're required", "self.plugin_files.extend(new_plugin_files) new_plugin_modules = list( set(plugin_msg.plugin_modules) - set(self.plugin_modules)) self._load_from_modules(new_plugin_modules) self.plugin_modules.extend(new_plugin_modules) def", "def register_config_builder(self, group, key, builder_class): \"\"\"Registers a ConfigBuilder as a", "argument used to reference this runner; e.g. if the key", "PluginError( 'CommandConfigBuilder is already registered for command' 'with type {}'.format(command_type))", "for a `[ \"module\" ]` like syntax, even though that", "if command_type in self.aux_command_classes: raise PluginError('AuxCommand is already registered for", "can use the runner by issuing a \"rastervision run foo_runner", "as a plugin.\"\"\" self.default_raster_sources.append(provider_class) def register_default_vector_source(self, provider_class): \"\"\"Registers a VectorSourceDefaultProvider", "previous versions of Raster Vision allowed for a `[ \"module\"", "a plugin.\"\"\" self.default_evaluators.append(provider_class) def register_experiment_runner(self, runner_key, runner_class): \"\"\"Registers an ExperimentRunner", "LabelStoreDefaultProvider for use as a plugin.\"\"\" self.default_label_stores.append(provider_class) def register_default_evaluator(self, provider_class):", "separated. 
This takes into account that previous versions of Raster", "try: # A comma separated list of values will be", "'register_plugin') if not callable(register_method): raise PluginError('Plugin at {} has a", "already registered for command' 'with type {}'.format(command_type)) self.aux_command_classes[command_type] = command_class", "the plugin is valid if not hasattr(plugin, 'register_plugin'): raise PluginError('Plugin", "PluginConfigMsg( plugin_uris=self.plugin_files, plugin_modules=self.plugin_modules) def register_config_builder(self, group, key, builder_class): \"\"\"Registers a", "self.command_config_builders: raise PluginError( 'CommandConfigBuilder already registered for command' 'with type", "Check the plugin is valid if not hasattr(plugin, 'register_plugin'): raise", "for use as a plugin.\"\"\" self.default_evaluators.append(provider_class) def register_experiment_runner(self, runner_key, runner_class):", "PluginError('ConfigBuilder already registered for group ' '{} and key {}'.format(group,", "plugin.\"\"\" self.default_evaluators.append(provider_class) def register_experiment_runner(self, runner_key, runner_class): \"\"\"Registers an ExperimentRunner as", "= plugin_base.make_plugin_source( searchpath=[local_dir]) # We're required to hang onto the", "match the command line argument used to reference this runner;", "then users can use the runner by issuing a \"rastervision", "# single quotes with double quotes lets us parse it", "{}'.format(runner_key)) self.experiment_runners[runner_key] = runner_class def register_filesystem(self, filesystem_class): \"\"\"Registers a FileSystem", "builder_class - The subclass of ConfigBuilder that builds the Config", "as a plugin.\"\"\" self.default_evaluators.append(provider_class) def register_experiment_runner(self, runner_key, runner_class): \"\"\"Registers an", "parse it as a JSON list. return json.loads(s.replace(\"'\", '\"')) except", "call. 
command_class - The subclass of AuxCommand subclass to register.", "AuxCommand as a plugin. Args: command_type - The key used", "raise PluginError('ExperimentRunner already registered for ' 'key {}'.format(runner_key)) self.experiment_runners[runner_key] =", "single quotes with double quotes lets us parse it as", "import os import json import importlib from pluginbase import PluginBase", "fs = rv._registry.get_file_system(uri, search_plugins=False) local_path = download_if_needed(uri, plugin_path, fs=fs) local_dir", "= {} self.command_config_builders = {} self.commands = [] self.aux_command_classes =", "to_proto(self): \"\"\"Returns a protobuf message that records the plugin sources", "that didn't work for multi-value lists. \"\"\" try: # A", "builder in a \".builder(key)\" call. command_class - The subclass of", "[] plugin_files = load_conf_list(plugin_config('files', default='[]')) self._load_from_files(plugin_files) self.plugin_files = plugin_files plugin_modules", "self._load_from_files(plugin_files) self.plugin_files = plugin_files plugin_modules = load_conf_list(plugin_config('modules', default='[]')) self._load_from_modules(plugin_modules) self.plugin_modules", "to plugins in a call to their \"register_plugin\" method. 
Args:", "like syntax, even though that didn't work for multi-value lists.", "- The subclass of CommandConfigBuilder that builds the CommandConfig for", "{}'.format(command_type)) if command_type in self.aux_command_classes: raise PluginError('AuxCommand is already registered", "command_type in self.aux_command_classes: raise PluginError('AuxCommand is already registered for command'", "_load_from_files(self, plugin_paths): if not plugin_paths: return self.plugin_sources = [] plugin_base", "new_plugin_modules = list( set(plugin_msg.plugin_modules) - set(self.plugin_modules)) self._load_from_modules(new_plugin_modules) self.plugin_modules.extend(new_plugin_modules) def to_proto(self):", "register_method(self) def _load_from_files(self, plugin_paths): if not plugin_paths: return self.plugin_sources =", "builds the CommandConfig for this plugin. \"\"\" if command_type in", "self.commands = [] self.aux_command_classes = {} self.default_raster_sources = [] self.default_vector_sources", "issuing a \"rastervision run foo_runner ...\" command. runner_class - The", "not plugin_paths: return self.plugin_sources = [] plugin_base = PluginBase(package='rastervision.plugins') for", "__init__(self, plugin_config, rv_home): \"\"\"Initializes this plugin registry. A plugin registry", "self.plugin_root_dir = os.path.join(rv_home, 'plugins') self.config_builders = {} self.command_config_builders = {}", "[] self.default_label_stores = [] self.default_evaluators = [] self.experiment_runners = {}", "allowed for a `[ \"module\" ]` like syntax, even though", "as rv from rastervision.protos.plugin_pb2 import PluginConfig as PluginConfigMsg from rastervision.utils.files", "syntax, even though that didn't work for multi-value lists. 
\"\"\"", "command' 'with type {}'.format(command_type)) self.aux_command_classes[command_type] = command_class if command_class.options.include_by_default: self.commands.append(command_type)", "a plugin.\"\"\" self.default_label_stores.append(provider_class) def register_default_evaluator(self, provider_class): \"\"\"Registers an EvaluatorDefaultProvider for", "at {} does not have ' '\"register_plugin\" method.'.format(identifier)) register_method =", "plugin_path, fs=fs) local_dir = os.path.dirname(local_path) plugin_source = plugin_base.make_plugin_source( searchpath=[local_dir]) #", "of \". Replacing # single quotes with double quotes lets", "plugin.\"\"\" self.default_label_sources.append(provider_class) def register_default_label_store(self, provider_class): \"\"\"Registers a LabelStoreDefaultProvider for use", "{} has a ' '\"register_plugin\" attribute, ' 'but it is", "used to reference this runner; e.g. if the key is", "plugin_modules): if not plugin_modules: return for module in plugin_modules: plugin", "message that records the plugin sources for plugins that are", "runner_class def register_filesystem(self, filesystem_class): \"\"\"Registers a FileSystem as a plugin.\"\"\"", "# We're required to hang onto the source # to", "\"\"\"Registers an ExperimentRunner as a plugin. Args: runner_key - The", "records the plugin sources for plugins that are currently loaded", "application configuration. \"\"\" self.plugin_root_dir = os.path.join(rv_home, 'plugins') self.config_builders = {}", "PluginBase import rastervision as rv from rastervision.protos.plugin_pb2 import PluginConfig as", "new_plugin_files = list( set(plugin_msg.plugin_uris) - set(self.plugin_files)) self._load_from_files(new_plugin_files) self.plugin_files.extend(new_plugin_files) new_plugin_modules =", "register. \"\"\" if command_type in self.command_config_builders: raise PluginError( 'CommandConfigBuilder is", "command_type, builder_class): \"\"\"Registers a ConfigBuilder as a plugin. 
Args: command_type", "= getattr(plugin, 'register_plugin') if not callable(register_method): raise PluginError('Plugin at {}", "fs=fs) local_dir = os.path.dirname(local_path) plugin_source = plugin_base.make_plugin_source( searchpath=[local_dir]) # We're", "# TODO: Log loading plugin. register_method(self) def _load_from_files(self, plugin_paths): if", "command. runner_class - The class of the ExperimentRunner plugin. \"\"\"", "for multi-value lists. \"\"\" try: # A comma separated list", "e.g. if the key is \"FOO_RUNNER\", then users can use", "runner. This is a string that will match the command", "= list( set(plugin_msg.plugin_modules) - set(self.plugin_modules)) self._load_from_modules(new_plugin_modules) self.plugin_modules.extend(new_plugin_modules) def to_proto(self): \"\"\"Returns", "as a JSON list. return json.loads(s.replace(\"'\", '\"')) except json.JSONDecodeError: return", "has a ' '\"register_plugin\" attribute, ' 'but it is not", "getattr(plugin, 'register_plugin') if not callable(register_method): raise PluginError('Plugin at {} has", "uri in plugin_paths: plugin_name = os.path.splitext(os.path.basename(uri))[0] plugin_path = os.path.join(self.plugin_root_dir, plugin_name)", "this plugin. \"\"\" if (group, key) in self.config_builders: raise PluginError('ConfigBuilder", "register_default_vector_source(self, provider_class): \"\"\"Registers a VectorSourceDefaultProvider for use as a plugin.\"\"\"", "default='[]')) self._load_from_modules(plugin_modules) self.plugin_modules = plugin_modules def _load_plugin(self, plugin, identifier): #", "lets us parse it as a JSON list. return json.loads(s.replace(\"'\",", "method.'.format(identifier)) register_method = getattr(plugin, 'register_plugin') if not callable(register_method): raise PluginError('Plugin", "that will match the command line argument used to reference", "plugin. Args: runner_key - The key used to reference this", "key, builder_class): \"\"\"Registers a ConfigBuilder as a plugin. 
Args: group", "import PluginBase import rastervision as rv from rastervision.protos.plugin_pb2 import PluginConfig", "in plugin_paths: plugin_name = os.path.splitext(os.path.basename(uri))[0] plugin_path = os.path.join(self.plugin_root_dir, plugin_name) fs", "= {} self.commands = [] self.aux_command_classes = {} self.default_raster_sources =", "in self.experiment_runners: raise PluginError('ExperimentRunner already registered for ' 'key {}'.format(runner_key))", "in self.command_config_builders: raise PluginError( 'CommandConfigBuilder is already registered for command'", "\"\"\" return PluginConfigMsg( plugin_uris=self.plugin_files, plugin_modules=self.plugin_modules) def register_config_builder(self, group, key, builder_class):", "EvaluatorDefaultProvider for use as a plugin.\"\"\" self.default_evaluators.append(provider_class) def register_experiment_runner(self, runner_key,", "as a plugin.\"\"\" self.default_label_sources.append(provider_class) def register_default_label_store(self, provider_class): \"\"\"Registers a LabelStoreDefaultProvider", "return PluginConfigMsg( plugin_uris=self.plugin_files, plugin_modules=self.plugin_modules) def register_config_builder(self, group, key, builder_class): \"\"\"Registers", "command_class.options.include_by_default: self.commands.append(command_type) def register_default_raster_source(self, provider_class): \"\"\"Registers a RasterSourceDefaultProvider for use", "for ' 'key {}'.format(runner_key)) self.experiment_runners[runner_key] = runner_class def register_filesystem(self, filesystem_class):", "not plugin_modules: return for module in plugin_modules: plugin = importlib.import_module(module)", "builds the Config for this plugin. \"\"\" if (group, key)" ]
[ "from torch.autograd import Variable import functools from . import net_blocks", "<filename>acsm/nnutils/resunet.py from __future__ import absolute_import from __future__ import division from", "def reinit_weights(self, ): self.encoder = ResnetEncoder(n_blocks=self.n_blocks) nb.net_init(self.decoder) class ResnetEncoder(nn.Module): def", "inner_nc, output_nc, kernel_size=3, stride=1, padding=1, bias=True) decoder.append(up) self.decoder = nn.Sequential(*decoder)", "): self.encoder = ResnetEncoder(n_blocks=self.n_blocks) nb.net_init(self.decoder) class ResnetEncoder(nn.Module): def __init__(self, n_blocks):", "__future__ import print_function from absl import app from absl import", "nn.Conv2d( inner_nc, output_nc, kernel_size=3, stride=1, padding=1, bias=True) decoder.append(up) self.decoder =", "nn from torch.autograd import Variable import functools from . import", "= 512 nlayers = 5 for lx in range(nlayers): outnc", "= self.resnet.maxpool(x) if n_blocks >= 1: x = self.resnet.layer1(x) if", "nn.Sequential(*decoder) nb.net_init(self.decoder) return def forward(self, input): img_enc = self.encoder(input) img_dec", "in range(nlayers): outnc = max(inner_nc // 2, 16) up =", "from __future__ import division from __future__ import print_function from absl", "absolute_import from __future__ import division from __future__ import print_function from", "padding=1, bias=True) decoder.append(up) self.decoder = nn.Sequential(*decoder) nb.net_init(self.decoder) return def forward(self,", "1: x = self.resnet.layer1(x) if n_blocks >= 2: x =", "= self.encoder(input) img_dec = self.decoder(img_enc) return img_dec def reinit_weights(self, ):", "self.decoder = nn.Sequential(*decoder) nb.net_init(self.decoder) return def forward(self, input): img_enc =", "__init__(self, n_blocks): super(ResnetEncoder, self).__init__() self.resnet = torchvision.models.resnet18(pretrained=True) self.n_blocks = n_blocks", "nb.upconv2d(inner_nc, outnc) decoder.append(up) inner_nc = outnc up = nn.Conv2d( 
inner_nc,", "n_blocks == 3: inner_nc = 256 nlayers = 4 elif", "x = self.resnet.bn1(x) x = self.resnet.relu(x) x = self.resnet.maxpool(x) if", "5 for lx in range(nlayers): outnc = max(inner_nc // 2,", "range(nlayers): outnc = max(inner_nc // 2, 16) up = nb.upconv2d(inner_nc,", "torch.nn as nn from torch.autograd import Variable import functools from", "= [] if n_blocks == 3: inner_nc = 256 nlayers", "== 3: inner_nc = 256 nlayers = 4 elif n_blocks", "n_blocks >= 1: x = self.resnet.layer1(x) if n_blocks >= 2:", "import os import os.path as osp import numpy as np", "import torchvision import torch.nn as nn from torch.autograd import Variable", "= nb.upconv2d(inner_nc, outnc) decoder.append(up) inner_nc = outnc up = nn.Conv2d(", "input): img_enc = self.encoder(input) img_dec = self.decoder(img_enc) return img_dec def", "x = self.resnet.layer3(x) if n_blocks >= 4: x = self.resnet.layer4(x)", "= self.resnet.layer3(x) if n_blocks >= 4: x = self.resnet.layer4(x) return", "division from __future__ import print_function from absl import app from", "return def forward(self, input): img_enc = self.encoder(input) img_dec = self.decoder(img_enc)", "__future__ import division from __future__ import print_function from absl import", "decoder.append(up) inner_nc = outnc up = nn.Conv2d( inner_nc, output_nc, kernel_size=3,", "= n_blocks decoder = [] if n_blocks == 3: inner_nc", "x): n_blocks = self.n_blocks x = self.resnet.conv1(x) x = self.resnet.bn1(x)", "decoder = [] if n_blocks == 3: inner_nc = 256", "inner_nc = 512 nlayers = 5 for lx in range(nlayers):", "def __init__(self, input_nc, output_nc, n_blocks=3, ngf=64,): super(ResNetConcatGenerator, self).__init__() self.encoder =", "= 4 elif n_blocks == 4: inner_nc = 512 nlayers", "outnc = max(inner_nc // 2, 16) up = nb.upconv2d(inner_nc, outnc)", "x = self.resnet.relu(x) x = self.resnet.maxpool(x) if n_blocks >= 1:", "ResnetEncoder(n_blocks=self.n_blocks) nb.net_init(self.decoder) class ResnetEncoder(nn.Module): def __init__(self, 
n_blocks): super(ResnetEncoder, self).__init__() self.resnet", "input_nc, output_nc, n_blocks=3, ngf=64,): super(ResNetConcatGenerator, self).__init__() self.encoder = ResnetEncoder(n_blocks=n_blocks) self.n_blocks", "[] if n_blocks == 3: inner_nc = 256 nlayers =", "torchvision.models.resnet18(pretrained=True) self.n_blocks = n_blocks def forward(self, x): n_blocks = self.n_blocks", "n_blocks == 4: inner_nc = 512 nlayers = 5 for", "if n_blocks >= 1: x = self.resnet.layer1(x) if n_blocks >=", "img_dec def reinit_weights(self, ): self.encoder = ResnetEncoder(n_blocks=self.n_blocks) nb.net_init(self.decoder) class ResnetEncoder(nn.Module):", "ResnetEncoder(n_blocks=n_blocks) self.n_blocks = n_blocks decoder = [] if n_blocks ==", "self.resnet.layer1(x) if n_blocks >= 2: x = self.resnet.layer2(x) if n_blocks", "= self.resnet.bn1(x) x = self.resnet.relu(x) x = self.resnet.maxpool(x) if n_blocks", "output_nc, n_blocks=3, ngf=64,): super(ResNetConcatGenerator, self).__init__() self.encoder = ResnetEncoder(n_blocks=n_blocks) self.n_blocks =", "nb import pdb class ResNetConcatGenerator(nn.Module): def __init__(self, input_nc, output_nc, n_blocks=3,", "self.resnet.layer3(x) if n_blocks >= 4: x = self.resnet.layer4(x) return x", "= nn.Conv2d( inner_nc, output_nc, kernel_size=3, stride=1, padding=1, bias=True) decoder.append(up) self.decoder", "= max(inner_nc // 2, 16) up = nb.upconv2d(inner_nc, outnc) decoder.append(up)", "__init__(self, input_nc, output_nc, n_blocks=3, ngf=64,): super(ResNetConcatGenerator, self).__init__() self.encoder = ResnetEncoder(n_blocks=n_blocks)", "self.resnet.conv1(x) x = self.resnet.bn1(x) x = self.resnet.relu(x) x = self.resnet.maxpool(x)", "if n_blocks >= 3: x = self.resnet.layer3(x) if n_blocks >=", "import flags import os import os.path as osp import numpy", "= ResnetEncoder(n_blocks=self.n_blocks) nb.net_init(self.decoder) class ResnetEncoder(nn.Module): def __init__(self, n_blocks): super(ResnetEncoder, self).__init__()", "import torch.nn as 
nn from torch.autograd import Variable import functools", "= self.resnet.layer2(x) if n_blocks >= 3: x = self.resnet.layer3(x) if", "os.path as osp import numpy as np import torch import", "torch.autograd import Variable import functools from . import net_blocks as", "self.resnet.bn1(x) x = self.resnet.relu(x) x = self.resnet.maxpool(x) if n_blocks >=", "img_enc = self.encoder(input) img_dec = self.decoder(img_enc) return img_dec def reinit_weights(self,", "import net_blocks as nb import pdb class ResNetConcatGenerator(nn.Module): def __init__(self,", "stride=1, padding=1, bias=True) decoder.append(up) self.decoder = nn.Sequential(*decoder) nb.net_init(self.decoder) return def", "elif n_blocks == 4: inner_nc = 512 nlayers = 5", "n_blocks def forward(self, x): n_blocks = self.n_blocks x = self.resnet.conv1(x)", "n_blocks >= 2: x = self.resnet.layer2(x) if n_blocks >= 3:", "from __future__ import print_function from absl import app from absl", "np import torch import torchvision import torch.nn as nn from", "from absl import app from absl import flags import os", "bias=True) decoder.append(up) self.decoder = nn.Sequential(*decoder) nb.net_init(self.decoder) return def forward(self, input):", "nb.net_init(self.decoder) class ResnetEncoder(nn.Module): def __init__(self, n_blocks): super(ResnetEncoder, self).__init__() self.resnet =", "self.n_blocks = n_blocks decoder = [] if n_blocks == 3:", "up = nb.upconv2d(inner_nc, outnc) decoder.append(up) inner_nc = outnc up =", "max(inner_nc // 2, 16) up = nb.upconv2d(inner_nc, outnc) decoder.append(up) inner_nc", "4: inner_nc = 512 nlayers = 5 for lx in", "ngf=64,): super(ResNetConcatGenerator, self).__init__() self.encoder = ResnetEncoder(n_blocks=n_blocks) self.n_blocks = n_blocks decoder", "self).__init__() self.resnet = torchvision.models.resnet18(pretrained=True) self.n_blocks = n_blocks def forward(self, x):", "reinit_weights(self, ): self.encoder = ResnetEncoder(n_blocks=self.n_blocks) nb.net_init(self.decoder) class 
ResnetEncoder(nn.Module): def __init__(self,", "self.encoder(input) img_dec = self.decoder(img_enc) return img_dec def reinit_weights(self, ): self.encoder", "from __future__ import absolute_import from __future__ import division from __future__", "net_blocks as nb import pdb class ResNetConcatGenerator(nn.Module): def __init__(self, input_nc,", "osp import numpy as np import torch import torchvision import", "self.decoder(img_enc) return img_dec def reinit_weights(self, ): self.encoder = ResnetEncoder(n_blocks=self.n_blocks) nb.net_init(self.decoder)", "from . import net_blocks as nb import pdb class ResNetConcatGenerator(nn.Module):", "n_blocks >= 3: x = self.resnet.layer3(x) if n_blocks >= 4:", "def __init__(self, n_blocks): super(ResnetEncoder, self).__init__() self.resnet = torchvision.models.resnet18(pretrained=True) self.n_blocks =", "= n_blocks def forward(self, x): n_blocks = self.n_blocks x =", "flags import os import os.path as osp import numpy as", "self.resnet.relu(x) x = self.resnet.maxpool(x) if n_blocks >= 1: x =", "absl import flags import os import os.path as osp import", "as osp import numpy as np import torch import torchvision", "up = nn.Conv2d( inner_nc, output_nc, kernel_size=3, stride=1, padding=1, bias=True) decoder.append(up)", "img_dec = self.decoder(img_enc) return img_dec def reinit_weights(self, ): self.encoder =", "if n_blocks >= 2: x = self.resnet.layer2(x) if n_blocks >=", "self.resnet.layer2(x) if n_blocks >= 3: x = self.resnet.layer3(x) if n_blocks", "if n_blocks == 3: inner_nc = 256 nlayers = 4", "print_function from absl import app from absl import flags import", "import pdb class ResNetConcatGenerator(nn.Module): def __init__(self, input_nc, output_nc, n_blocks=3, ngf=64,):", "= nn.Sequential(*decoder) nb.net_init(self.decoder) return def forward(self, input): img_enc = self.encoder(input)", "= self.decoder(img_enc) return img_dec def reinit_weights(self, ): self.encoder = ResnetEncoder(n_blocks=self.n_blocks)", ">= 2: x = 
self.resnet.layer2(x) if n_blocks >= 3: x", "import Variable import functools from . import net_blocks as nb", "== 4: inner_nc = 512 nlayers = 5 for lx", "// 2, 16) up = nb.upconv2d(inner_nc, outnc) decoder.append(up) inner_nc =", "16) up = nb.upconv2d(inner_nc, outnc) decoder.append(up) inner_nc = outnc up", ">= 1: x = self.resnet.layer1(x) if n_blocks >= 2: x", "for lx in range(nlayers): outnc = max(inner_nc // 2, 16)", "ResNetConcatGenerator(nn.Module): def __init__(self, input_nc, output_nc, n_blocks=3, ngf=64,): super(ResNetConcatGenerator, self).__init__() self.encoder", "app from absl import flags import os import os.path as", "super(ResNetConcatGenerator, self).__init__() self.encoder = ResnetEncoder(n_blocks=n_blocks) self.n_blocks = n_blocks decoder =", "2, 16) up = nb.upconv2d(inner_nc, outnc) decoder.append(up) inner_nc = outnc", "kernel_size=3, stride=1, padding=1, bias=True) decoder.append(up) self.decoder = nn.Sequential(*decoder) nb.net_init(self.decoder) return", "= self.n_blocks x = self.resnet.conv1(x) x = self.resnet.bn1(x) x =", "nb.net_init(self.decoder) return def forward(self, input): img_enc = self.encoder(input) img_dec =", "nlayers = 5 for lx in range(nlayers): outnc = max(inner_nc", "Variable import functools from . 
import net_blocks as nb import", "class ResnetEncoder(nn.Module): def __init__(self, n_blocks): super(ResnetEncoder, self).__init__() self.resnet = torchvision.models.resnet18(pretrained=True)", "= ResnetEncoder(n_blocks=n_blocks) self.n_blocks = n_blocks decoder = [] if n_blocks", "import torch import torchvision import torch.nn as nn from torch.autograd", "= torchvision.models.resnet18(pretrained=True) self.n_blocks = n_blocks def forward(self, x): n_blocks =", "torch import torchvision import torch.nn as nn from torch.autograd import", "return img_dec def reinit_weights(self, ): self.encoder = ResnetEncoder(n_blocks=self.n_blocks) nb.net_init(self.decoder) class", "self.n_blocks = n_blocks def forward(self, x): n_blocks = self.n_blocks x", "__future__ import absolute_import from __future__ import division from __future__ import", "super(ResnetEncoder, self).__init__() self.resnet = torchvision.models.resnet18(pretrained=True) self.n_blocks = n_blocks def forward(self,", "2: x = self.resnet.layer2(x) if n_blocks >= 3: x =", "forward(self, x): n_blocks = self.n_blocks x = self.resnet.conv1(x) x =", "n_blocks decoder = [] if n_blocks == 3: inner_nc =", "from absl import flags import os import os.path as osp", "self.encoder = ResnetEncoder(n_blocks=self.n_blocks) nb.net_init(self.decoder) class ResnetEncoder(nn.Module): def __init__(self, n_blocks): super(ResnetEncoder,", "self).__init__() self.encoder = ResnetEncoder(n_blocks=n_blocks) self.n_blocks = n_blocks decoder = []", "import os.path as osp import numpy as np import torch", "os import os.path as osp import numpy as np import", "as nb import pdb class ResNetConcatGenerator(nn.Module): def __init__(self, input_nc, output_nc,", "n_blocks): super(ResnetEncoder, self).__init__() self.resnet = torchvision.models.resnet18(pretrained=True) self.n_blocks = n_blocks def", "pdb class ResNetConcatGenerator(nn.Module): def __init__(self, input_nc, output_nc, n_blocks=3, ngf=64,): super(ResNetConcatGenerator,", "lx in 
range(nlayers): outnc = max(inner_nc // 2, 16) up", "self.resnet = torchvision.models.resnet18(pretrained=True) self.n_blocks = n_blocks def forward(self, x): n_blocks", "= outnc up = nn.Conv2d( inner_nc, output_nc, kernel_size=3, stride=1, padding=1,", "forward(self, input): img_enc = self.encoder(input) img_dec = self.decoder(img_enc) return img_dec", "import numpy as np import torch import torchvision import torch.nn", "n_blocks=3, ngf=64,): super(ResNetConcatGenerator, self).__init__() self.encoder = ResnetEncoder(n_blocks=n_blocks) self.n_blocks = n_blocks", "torchvision import torch.nn as nn from torch.autograd import Variable import", "512 nlayers = 5 for lx in range(nlayers): outnc =", "decoder.append(up) self.decoder = nn.Sequential(*decoder) nb.net_init(self.decoder) return def forward(self, input): img_enc", "= self.resnet.conv1(x) x = self.resnet.bn1(x) x = self.resnet.relu(x) x =", "= self.resnet.layer1(x) if n_blocks >= 2: x = self.resnet.layer2(x) if", "3: inner_nc = 256 nlayers = 4 elif n_blocks ==", "ResnetEncoder(nn.Module): def __init__(self, n_blocks): super(ResnetEncoder, self).__init__() self.resnet = torchvision.models.resnet18(pretrained=True) self.n_blocks", "self.resnet.maxpool(x) if n_blocks >= 1: x = self.resnet.layer1(x) if n_blocks", "= 256 nlayers = 4 elif n_blocks == 4: inner_nc", "outnc up = nn.Conv2d( inner_nc, output_nc, kernel_size=3, stride=1, padding=1, bias=True)", "numpy as np import torch import torchvision import torch.nn as", "class ResNetConcatGenerator(nn.Module): def __init__(self, input_nc, output_nc, n_blocks=3, ngf=64,): super(ResNetConcatGenerator, self).__init__()", "as nn from torch.autograd import Variable import functools from .", "4 elif n_blocks == 4: inner_nc = 512 nlayers =", "= self.resnet.relu(x) x = self.resnet.maxpool(x) if n_blocks >= 1: x", "outnc) decoder.append(up) inner_nc = outnc up = nn.Conv2d( inner_nc, output_nc,", "x = self.resnet.layer2(x) if n_blocks >= 3: x = self.resnet.layer3(x)", "= 5 for 
lx in range(nlayers): outnc = max(inner_nc //", "functools from . import net_blocks as nb import pdb class", "x = self.resnet.maxpool(x) if n_blocks >= 1: x = self.resnet.layer1(x)", "self.encoder = ResnetEncoder(n_blocks=n_blocks) self.n_blocks = n_blocks decoder = [] if", "256 nlayers = 4 elif n_blocks == 4: inner_nc =", ">= 3: x = self.resnet.layer3(x) if n_blocks >= 4: x", "x = self.resnet.layer1(x) if n_blocks >= 2: x = self.resnet.layer2(x)", "3: x = self.resnet.layer3(x) if n_blocks >= 4: x =", "import functools from . import net_blocks as nb import pdb", "output_nc, kernel_size=3, stride=1, padding=1, bias=True) decoder.append(up) self.decoder = nn.Sequential(*decoder) nb.net_init(self.decoder)", "import app from absl import flags import os import os.path", "import print_function from absl import app from absl import flags", "import division from __future__ import print_function from absl import app", ". import net_blocks as nb import pdb class ResNetConcatGenerator(nn.Module): def", "nlayers = 4 elif n_blocks == 4: inner_nc = 512", "x = self.resnet.conv1(x) x = self.resnet.bn1(x) x = self.resnet.relu(x) x", "def forward(self, x): n_blocks = self.n_blocks x = self.resnet.conv1(x) x", "import absolute_import from __future__ import division from __future__ import print_function", "absl import app from absl import flags import os import", "def forward(self, input): img_enc = self.encoder(input) img_dec = self.decoder(img_enc) return", "inner_nc = 256 nlayers = 4 elif n_blocks == 4:", "n_blocks = self.n_blocks x = self.resnet.conv1(x) x = self.resnet.bn1(x) x", "as np import torch import torchvision import torch.nn as nn", "self.n_blocks x = self.resnet.conv1(x) x = self.resnet.bn1(x) x = self.resnet.relu(x)", "inner_nc = outnc up = nn.Conv2d( inner_nc, output_nc, kernel_size=3, stride=1," ]
[ "= out * awkward.util.numpy.sign(normal.dot(a.cross(b))) if degrees: out = awkward.util.numpy.multiply(out, 180.0/awkward.util.numpy.pi)", "= awkward.util.numpy.absolute(tmp.z) out = (tmp.x < tolerance) out = awkward.util.numpy.bitwise_and(out,", "# and/or other materials provided with the distribution. # #", "__radd__(self, other): return self._vector(operator.add, other, True) def __sub__(self, other): return", "m2) return max(-1.0, min(1.0, r)) def angle(self, other, degrees=False): out", "__rxor__(self, other): return self._scalar(operator.xor, other, True) def __neg__(self): return self._unary(operator.neg)", "ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,", "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR", "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF", "def phi(self): return awkward.util.numpy.arctan2(self.y, self.x) def cosdelta(self, other): denom =", "True) def __and__(self, other): return self._scalar(operator.and_, other) def __rand__(self, other):", "must reproduce the above copyright notice, # this list of", "True) def __div__(self, other): return self._scalar(operator.div, other) def __rdiv__(self, other):", "above copyright notice, # this list of conditions and the", "# # Redistribution and use in source and binary forms,", "< tolerance def iscollinear(self, other, tolerance=1e-10): return 1 - awkward.util.numpy.absolute(self.cosdelta(other))", "return self._scalar(operator.divmod, other, True) def __pow__(self, other): if isinstance(other, (numbers.Number,", "other, True) def __mul__(self, other): return self._scalar(operator.mul, other) def __rmul__(self,", "DAMAGE. 
import math import numbers import operator import awkward import", "other, True) def __rshift__(self, other): return self._scalar(operator.rshift, other) def __rrshift__(self,", "def __xor__(self, other): return self._scalar(operator.xor, other) def __rxor__(self, other): return", "Copyright (c) 2018, DIANA-HEP # All rights reserved. # #", "self._scalar(operator.or_, other) def __ror__(self, other): return self._scalar(operator.or_, other, True) def", "with the distribution. # # * Neither the name of", "math.atan2(self.y, self.x) def cosdelta(self, other): m1 = self.mag2 m2 =", "NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE", "- other.phi + math.pi) % (2*math.pi) - math.pi def isparallel(self,", "awkward.util.numpy.logical_not(mask) out[mask] = 1 return awkward.util.numpy.clip(out, -1, 1) def angle(self,", "without specific prior written permission. # # THIS SOFTWARE IS", "= self.dot(other) tmp.x = awkward.util.numpy.absolute(tmp.x) tmp.y = awkward.util.numpy.absolute(tmp.y) tmp.z =", "return self._scalar(operator.and_, other, True) def __or__(self, other): return self._scalar(operator.or_, other)", "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR", "self / self.mag @property def rho(self): out = self.rho2 return", "INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT", "other) def __rmul__(self, other): return self._scalar(operator.mul, other, True) def __div__(self,", "GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS;", "out = out + self.y*self.y return out def delta_phi(self, other):", "or m2 == 0: return 1.0 r = self.dot(other) /", "#!/usr/bin/env python # Copyright (c) 2018, DIANA-HEP # All rights", "from # this software without specific prior written permission. #", "A PARTICULAR PURPOSE ARE # DISCLAIMED. 
IN NO EVENT SHALL", "/ self.mag @property def rho(self): return math.sqrt(self.rho2) @property def phi(self):", "* other.mag2 mask = (denom > 0) denom = denom[mask]", "notice, this # list of conditions and the following disclaimer.", "tolerance) out = awkward.util.numpy.bitwise_and(out, tmp.z < tolerance) return out class", "self._scalar(operator.lshift, other, True) def __rshift__(self, other): return self._scalar(operator.rshift, other) def", "if degrees: out = awkward.util.numpy.multiply(out, 180.0/awkward.util.numpy.pi) return out def isopposite(self,", "THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH", "self.rho2 return awkward.util.numpy.sqrt(out) @property def phi(self): return awkward.util.numpy.arctan2(self.y, self.x) def", "endorse or promote products derived from # this software without", "copyright notice, this # list of conditions and the following", "abs(tmp.z) < tolerance def isperpendicular(self, other, tolerance=1e-10): tmp = self.dot(other)", "degrees: out = out * 180.0/math.pi return out def isopposite(self,", "return self._scalar(operator.mul, other, True) def __div__(self, other): return self._scalar(operator.div, other)", "def __gt__(self, other): raise TypeError(\"spatial vectors have no natural ordering\")", "self.mag2 else: return self.mag2**(0.5*other) else: self._scalar(operator.pow, other) # no __rpow__", "source and binary forms, with or without # modification, are", "awkward.util.numpy.number)): if other == 2: return self.mag2 else: return self.mag2**(0.5*other)", "min(1.0, r)) def angle(self, other, degrees=False): out = math.acos(self.cosdelta(other)) if", "self._scalar(operator.truediv, other) def __rtruediv__(self, other): return self._scalar(operator.truediv, other, True) def", "PARTICULAR PURPOSE ARE # DISCLAIMED. 
IN NO EVENT SHALL THE", "__gt__(self, other): raise TypeError(\"spatial vectors have no natural ordering\") def", "ANY WAY OUT OF THE USE # OF THIS SOFTWARE,", "out def delta_phi(self, other): return (self.phi - other.phi + math.pi)", "def __floordiv__(self, other): return self._scalar(operator.floordiv, other) def __rfloordiv__(self, other): return", "= awkward.util.numpy.bitwise_and(out, tmp.z < tolerance) return out class Methods(Common): @property", "2: return self.mag2 else: return self.mag2**(0.5*other) else: self._scalar(operator.pow, other) #", "if normal is not None: a = self.unit b =", "isparallel(self, other, tolerance=1e-10): return 1 - self.cosdelta(other) < tolerance def", "TypeError(\"spatial vectors have no natural ordering\") def __le__(self, other): raise", "(tmp.x < tolerance) out = awkward.util.numpy.bitwise_and(out, tmp.y < tolerance) out", "out = out * awkward.util.numpy.sign(normal.dot(a.cross(b))) if degrees: out = awkward.util.numpy.multiply(out,", "other): return self._scalar(operator.mul, other, True) def __div__(self, other): return self._scalar(operator.div,", "isopposite(self, other, tolerance=1e-10): tmp = self + other return abs(tmp.x)", "of source code must retain the above copyright notice, this", "< tolerance) return out def isperpendicular(self, other, tolerance=1e-10): tmp =", "(c) 2018, DIANA-HEP # All rights reserved. # # Redistribution", "SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR", "OF THE POSSIBILITY OF SUCH DAMAGE. 
import math import numbers", "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED.", "return out def isperpendicular(self, other, tolerance=1e-10): tmp = self.dot(other) tmp.x", "other, True) def __pow__(self, other): if isinstance(other, (numbers.Number, awkward.util.numpy.number)): if", "OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR", "AND CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS OR IMPLIED", "ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, #", "out = awkward.util.numpy.bitwise_and(out, tmp.z < tolerance) return out class Methods(Common):", "+ self.y*self.y return out def delta_phi(self, other): return (self.phi -", "tolerance and abs(tmp.z) < tolerance def isperpendicular(self, other, tolerance=1e-10): tmp", "def isparallel(self, other, tolerance=1e-10): return 1 - self.cosdelta(other) < tolerance", "and binary forms, with or without # modification, are permitted", "r = self.dot(other) / math.sqrt(m1 * m2) return max(-1.0, min(1.0,", "def __mod__(self, other): return self._scalar(operator.mod, other) def __rmod__(self, other): return", "names of its # contributors may be used to endorse", "= out * 180.0/math.pi return out def isopposite(self, other, tolerance=1e-10):", "no natural ordering\") def __ge__(self, other): raise TypeError(\"spatial vectors have", "out = awkward.util.numpy.arccos(self.cosdelta(other)) if normal is not None: a =", "# * Neither the name of the copyright holder nor", "USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE", "return awkward.util.numpy.sqrt(out) @property def phi(self): return awkward.util.numpy.arctan2(self.y, self.x) def cosdelta(self,", "__ge__(self, other): raise TypeError(\"spatial vectors have no natural ordering\") class", "have no natural ordering\") class ArrayMethods(Common): @property def unit(self): return", "self / self.mag @property def rho(self): return math.sqrt(self.rho2) @property def", "BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,", "source code must 
retain the above copyright notice, this #", "def __rdivmod__(self, other): return self._scalar(operator.divmod, other, True) def __pow__(self, other):", "return self._scalar(operator.truediv, other) def __rtruediv__(self, other): return self._scalar(operator.truediv, other, True)", "tolerance=1e-10): return 1 - self.cosdelta(other) < tolerance def isantiparallel(self, other,", "def __rfloordiv__(self, other): return self._scalar(operator.floordiv, other, True) def __mod__(self, other):", "= awkward.util.numpy.logical_not(mask) out[mask] = 1 return awkward.util.numpy.clip(out, -1, 1) def", "self.y*self.y return out def delta_phi(self, other): return (self.phi - other.phi", "OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON", "self._scalar(operator.mod, other, True) def __divmod__(self, other): return self._scalar(operator.divmod, other) def", "isantiparallel(self, other, tolerance=1e-10): return self.cosdelta(other) - (-1) < tolerance def", "__neg__(self): return self._unary(operator.neg) def __pos__(self): return self._unary(operator.pos) def __abs__(self): return", "EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE", "awkward.util.numpy.multiply(out, 180.0/awkward.util.numpy.pi) return out def isopposite(self, other, tolerance=1e-10): tmp =", "other): raise TypeError(\"spatial vectors have no natural ordering\") def __ge__(self,", "this software without specific prior written permission. 
# # THIS", "other): return self._vector(operator.add, other) def __radd__(self, other): return self._vector(operator.add, other,", "OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY", "import awkward import awkward.util class Common(object): @property def mag2(self): return", "SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)", "@property def rho(self): out = self.rho2 return awkward.util.numpy.sqrt(out) @property def", "awkward.util.numpy.sqrt(out) @property def phi(self): return awkward.util.numpy.arctan2(self.y, self.x) def cosdelta(self, other):", "other): if isinstance(other, (numbers.Number, awkward.util.numpy.number)): if other == 2: return", "def __rdiv__(self, other): return self._scalar(operator.div, other, True) def __truediv__(self, other):", "other, True) def __or__(self, other): return self._scalar(operator.or_, other) def __ror__(self,", "written permission. # # THIS SOFTWARE IS PROVIDED BY THE", "WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE", "= self.dot(other) return abs(tmp.x) < tolerance and abs(tmp.y) < tolerance", "WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE", "tolerance=1e-10): return self.cosdelta(other) - (-1) < tolerance def iscollinear(self, other,", "def __rxor__(self, other): return self._scalar(operator.xor, other, True) def __neg__(self): return", "__rrshift__(self, other): return self._scalar(operator.rshift, other, True) def __and__(self, other): return", "True) def __rshift__(self, other): return self._scalar(operator.rshift, other) def __rrshift__(self, other):", "notice, # this list of conditions and the following disclaimer", "NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES;", "other) # no __rpow__ def __lshift__(self, other): return self._scalar(operator.lshift, other)", "@property def unit(self): return self / self.mag @property def rho(self):", "other): return self._vector(operator.add, other, True) def __sub__(self, other): return 
self._vector(operator.sub,", "other materials provided with the distribution. # # * Neither", "SUCH DAMAGE. import math import numbers import operator import awkward", "return self._scalar(operator.and_, other) def __rand__(self, other): return self._scalar(operator.and_, other, True)", "and abs(tmp.z) < tolerance def __add__(self, other): return self._vector(operator.add, other)", "phi(self): return math.atan2(self.y, self.x) def cosdelta(self, other): m1 = self.mag2", "software without specific prior written permission. # # THIS SOFTWARE", "raise TypeError(\"spatial vectors have no natural ordering\") def __le__(self, other):", "degrees=False): out = awkward.util.numpy.arccos(self.cosdelta(other)) if normal is not None: a", "def __div__(self, other): return self._scalar(operator.div, other) def __rdiv__(self, other): return", "retain the above copyright notice, this # list of conditions", "other): return self._vector(operator.sub, other) def __rsub__(self, other): return self._vector(operator.sub, other,", "(-1) < tolerance def iscollinear(self, other, tolerance=1e-10): return 1 -", "OUT OF THE USE # OF THIS SOFTWARE, EVEN IF", "other): denom = self.mag2 * other.mag2 mask = (denom >", "* Redistributions in binary form must reproduce the above copyright", "def __or__(self, other): return self._scalar(operator.or_, other) def __ror__(self, other): return", "__rdiv__(self, other): return self._scalar(operator.div, other, True) def __truediv__(self, other): return", "OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY", "self._vector(operator.add, other, True) def __sub__(self, other): return self._vector(operator.sub, other) def", "return self._scalar(operator.xor, other, True) def __neg__(self): return self._unary(operator.neg) def __pos__(self):", "natural ordering\") class ArrayMethods(Common): @property def unit(self): return self /", "CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,", "are met: # # * Redistributions of source 
code must", "awkward.util.numpy.absolute(tmp.x) tmp.y = awkward.util.numpy.absolute(tmp.y) tmp.z = awkward.util.numpy.absolute(tmp.z) out = (tmp.x", "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS", "disclaimer in the documentation # and/or other materials provided with", "@property def mag2(self): return self.dot(self) @property def mag(self): return awkward.util.numpy.sqrt(self.mag2)", "\"AS IS\" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,", "r)) def angle(self, other, degrees=False): out = math.acos(self.cosdelta(other)) if degrees:", "TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR", "tmp.z = awkward.util.numpy.absolute(tmp.z) out = (tmp.x < tolerance) out =", "CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS OR IMPLIED WARRANTIES,", "self._scalar(operator.xor, other, True) def __neg__(self): return self._unary(operator.neg) def __pos__(self): return", "disclaimer. # # * Redistributions in binary form must reproduce", "m1 == 0 or m2 == 0: return 1.0 r", "other): return self._scalar(operator.divmod, other, True) def __pow__(self, other): if isinstance(other,", "other) def __rrshift__(self, other): return self._scalar(operator.rshift, other, True) def __and__(self,", "cosdelta(self, other): denom = self.mag2 * other.mag2 mask = (denom", "True) def __or__(self, other): return self._scalar(operator.or_, other) def __ror__(self, other):", "name of the copyright holder nor the names of its", "ArrayMethods(Common): @property def unit(self): return self / self.mag @property def", "True) def __pow__(self, other): if isinstance(other, (numbers.Number, awkward.util.numpy.number)): if other", "# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR", "other.mag2 mask = (denom > 0) denom = denom[mask] denom[:]", "other): return self._scalar(operator.mul, other) def __rmul__(self, other): return self._scalar(operator.mul, other,", "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF", "1 - self.cosdelta(other) < tolerance 
def isantiparallel(self, other, tolerance=1e-10): return", "tolerance) out = awkward.util.numpy.bitwise_and(out, tmp.z < tolerance) return out def", "# this software without specific prior written permission. # #", "other): return self._scalar(operator.div, other) def __rdiv__(self, other): return self._scalar(operator.div, other,", "provided with the distribution. # # * Neither the name", "= math.acos(self.cosdelta(other)) if degrees: out = out * 180.0/math.pi return", "DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS", "def __rmul__(self, other): return self._scalar(operator.mul, other, True) def __div__(self, other):", "__rtruediv__(self, other): return self._scalar(operator.truediv, other, True) def __floordiv__(self, other): return", "self._scalar(operator.rshift, other, True) def __and__(self, other): return self._scalar(operator.and_, other) def", "% (2*math.pi) - math.pi def isparallel(self, other, tolerance=1e-10): return 1", "awkward.util.numpy.bitwise_and(out, tmp.z < tolerance) return out def isperpendicular(self, other, tolerance=1e-10):", "CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)", "Redistributions of source code must retain the above copyright notice,", "abs(tmp.x) < tolerance and abs(tmp.y) < tolerance and abs(tmp.z) <", "tolerance def __add__(self, other): return self._vector(operator.add, other) def __radd__(self, other):", "return self._vector(operator.add, other, True) def __sub__(self, other): return self._vector(operator.sub, other)", "other, tolerance=1e-10): tmp = self + other tmp.x = awkward.util.numpy.absolute(tmp.x)", "awkward import awkward.util class Common(object): @property def mag2(self): return self.dot(self)", "OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT", "rho(self): out = self.rho2 return awkward.util.numpy.sqrt(out) @property def phi(self): return", "return out class Methods(Common): @property def unit(self): return self /", "no natural ordering\") def 
__le__(self, other): raise TypeError(\"spatial vectors have", "self.mag @property def rho(self): return math.sqrt(self.rho2) @property def phi(self): return", "out = self.rho2 return awkward.util.numpy.sqrt(out) @property def phi(self): return awkward.util.numpy.arctan2(self.y,", "= self.mag2 m2 = other.mag2 if m1 == 0 or", "max(-1.0, min(1.0, r)) def angle(self, other, degrees=False): out = math.acos(self.cosdelta(other))", "materials provided with the distribution. # # * Neither the", "__lshift__(self, other): return self._scalar(operator.lshift, other) def __rlshift__(self, other): return self._scalar(operator.lshift,", "ordering\") def __ge__(self, other): raise TypeError(\"spatial vectors have no natural", "tolerance=1e-10): tmp = self + other tmp.x = awkward.util.numpy.absolute(tmp.x) tmp.y", "and/or other materials provided with the distribution. # # *", "awkward.util.numpy.absolute(tmp.z) out = (tmp.x < tolerance) out = awkward.util.numpy.bitwise_and(out, tmp.y", "self._vector(operator.sub, other) def __rsub__(self, other): return self._vector(operator.sub, other, True) def", "documentation # and/or other materials provided with the distribution. 
#", "other) def __rtruediv__(self, other): return self._scalar(operator.truediv, other, True) def __floordiv__(self,", "__rfloordiv__(self, other): return self._scalar(operator.floordiv, other, True) def __mod__(self, other): return", "other) def __rsub__(self, other): return self._vector(operator.sub, other, True) def __mul__(self,", "that the following conditions are met: # # * Redistributions", "IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE", "or without # modification, are permitted provided that the following", "OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT", "return out def isopposite(self, other, tolerance=1e-10): tmp = self +", "Redistribution and use in source and binary forms, with or", "code must retain the above copyright notice, this # list", "# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT", "return self.cosdelta(other) - (-1) < tolerance def iscollinear(self, other, tolerance=1e-10):", "__mod__(self, other): return self._scalar(operator.mod, other) def __rmod__(self, other): return self._scalar(operator.mod,", "return self._unary(operator.pos) def __abs__(self): return self.mag def __invert__(self): return self._unary(operator.invert)", "math.pi def isparallel(self, other, tolerance=1e-10): return 1 - self.cosdelta(other) <", "HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT,", "this # list of conditions and the following disclaimer. #", "self.x*self.x out = out + self.y*self.y return out def delta_phi(self,", "POSSIBILITY OF SUCH DAMAGE. 
import math import numbers import operator", "__mul__(self, other): return self._scalar(operator.mul, other) def __rmul__(self, other): return self._scalar(operator.mul,", "SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", "other, True) def __div__(self, other): return self._scalar(operator.div, other) def __rdiv__(self,", "= awkward.util.numpy.absolute(tmp.y) tmp.z = awkward.util.numpy.absolute(tmp.z) out = (tmp.x < tolerance)", "other): return self._vector(operator.sub, other, True) def __mul__(self, other): return self._scalar(operator.mul,", "return self._scalar(operator.mod, other) def __rmod__(self, other): return self._scalar(operator.mod, other, True)", "rights reserved. # # Redistribution and use in source and", "# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY", "other): raise TypeError(\"spatial vectors have no natural ordering\") def __gt__(self,", "/ self.mag @property def rho(self): out = self.rho2 return awkward.util.numpy.sqrt(out)", "DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING,", "> 0) denom = denom[mask] denom[:] = awkward.util.numpy.sqrt(denom) out =", "__ror__(self, other): return self._scalar(operator.or_, other, True) def __xor__(self, other): return", "other): return self._scalar(operator.and_, other, True) def __or__(self, other): return self._scalar(operator.or_,", "self._scalar(operator.mul, other) def __rmul__(self, other): return self._scalar(operator.mul, other, True) def", "= (tmp.x < tolerance) out = awkward.util.numpy.bitwise_and(out, tmp.y < tolerance)", "nor the names of its # contributors may be used", "binary form must reproduce the above copyright notice, # this", "unit(self): return self / self.mag @property def rho(self): return math.sqrt(self.rho2)", "return awkward.util.numpy.arctan2(self.y, self.x) def cosdelta(self, other): denom = self.mag2 *", "isperpendicular(self, other, tolerance=1e-10): tmp = self.dot(other) return abs(tmp.x) < tolerance", "other): 
return self._scalar(operator.truediv, other) def __rtruediv__(self, other): return self._scalar(operator.truediv, other,", "PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" #", "THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" # AND ANY", "awkward.util.numpy.sqrt(self.mag2) @property def rho2(self): out = self.x*self.x out = out", "out = awkward.util.numpy.bitwise_and(out, tmp.y < tolerance) out = awkward.util.numpy.bitwise_and(out, tmp.z", "abs(tmp.y) < tolerance and abs(tmp.z) < tolerance def __add__(self, other):", "return self._scalar(operator.xor, other) def __rxor__(self, other): return self._scalar(operator.xor, other, True)", "other) def __rlshift__(self, other): return self._scalar(operator.lshift, other, True) def __rshift__(self,", "other) def __ror__(self, other): return self._scalar(operator.or_, other, True) def __xor__(self,", "raise TypeError(\"spatial vectors have no natural ordering\") def __gt__(self, other):", "numbers import operator import awkward import awkward.util class Common(object): @property", "- self.cosdelta(other) < tolerance def isantiparallel(self, other, tolerance=1e-10): return self.cosdelta(other)", "mask = awkward.util.numpy.logical_not(mask) out[mask] = 1 return awkward.util.numpy.clip(out, -1, 1)", "a = self.unit b = other.unit out = out *", "other, True) def __xor__(self, other): return self._scalar(operator.xor, other) def __rxor__(self,", "no natural ordering\") def __gt__(self, other): raise TypeError(\"spatial vectors have", "__lt__(self, other): raise TypeError(\"spatial vectors have no natural ordering\") def", "FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL #", "TypeError(\"spatial vectors have no natural ordering\") class ArrayMethods(Common): @property def", "self.dot(other) tmp.x = awkward.util.numpy.absolute(tmp.x) tmp.y = awkward.util.numpy.absolute(tmp.y) tmp.z = awkward.util.numpy.absolute(tmp.z)", "other): return self._scalar(operator.xor, other, True) def __neg__(self): 
return self._unary(operator.neg) def", "BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF", "__rlshift__(self, other): return self._scalar(operator.lshift, other, True) def __rshift__(self, other): return", "in source and binary forms, with or without # modification,", "= awkward.util.numpy.bitwise_and(out, tmp.y < tolerance) out = awkward.util.numpy.bitwise_and(out, tmp.z <", "Methods(Common): @property def unit(self): return self / self.mag @property def", "AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED", "distribution. # # * Neither the name of the copyright", "permitted provided that the following conditions are met: # #", "LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING", "tolerance=1e-10): tmp = self.dot(other) tmp.x = awkward.util.numpy.absolute(tmp.x) tmp.y = awkward.util.numpy.absolute(tmp.y)", "__pow__(self, other): if isinstance(other, (numbers.Number, awkward.util.numpy.number)): if other == 2:", "math.acos(self.cosdelta(other)) if degrees: out = out * 180.0/math.pi return out", "list of conditions and the following disclaimer. 
# # *", "return awkward.util.numpy.clip(out, -1, 1) def angle(self, other, normal=None, degrees=False): out", "EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE #", "self.cosdelta(other) < tolerance def isantiparallel(self, other, tolerance=1e-10): return self.cosdelta(other) -", "other.phi + math.pi) % (2*math.pi) - math.pi def isparallel(self, other,", "__truediv__(self, other): return self._scalar(operator.truediv, other) def __rtruediv__(self, other): return self._scalar(operator.truediv,", "tmp.z < tolerance) return out class Methods(Common): @property def unit(self):", "in the documentation # and/or other materials provided with the", "+ math.pi) % (2*math.pi) - math.pi def isparallel(self, other, tolerance=1e-10):", "of conditions and the following disclaimer in the documentation #", "return self._scalar(operator.truediv, other, True) def __floordiv__(self, other): return self._scalar(operator.floordiv, other)", "self._scalar(operator.pow, other) # no __rpow__ def __lshift__(self, other): return self._scalar(operator.lshift,", "ARE # DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER", "products derived from # this software without specific prior written", "__rsub__(self, other): return self._vector(operator.sub, other, True) def __mul__(self, other): return", "abs(tmp.z) < tolerance def __add__(self, other): return self._vector(operator.add, other) def", "Common(object): @property def mag2(self): return self.dot(self) @property def mag(self): return", "tolerance) return out def isperpendicular(self, other, tolerance=1e-10): tmp = self.dot(other)", "1 return awkward.util.numpy.clip(out, -1, 1) def angle(self, other, normal=None, degrees=False):", "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND", "form must reproduce the above copyright notice, # this list", "(numbers.Number, awkward.util.numpy.number)): if other == 2: return self.mag2 else: return", "use in source and binary forms, with or without #", "LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR", "-1, 1) def angle(self, other, normal=None, degrees=False): out = awkward.util.numpy.arccos(self.cosdelta(other))", "CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,", "__rdivmod__(self, other): return self._scalar(operator.divmod, other, True) def __pow__(self, other): if", "def __rshift__(self, other): return self._scalar(operator.rshift, other) def __rrshift__(self, other): return", "0: return 1.0 r = self.dot(other) / math.sqrt(m1 * m2)", "__rshift__(self, other): return self._scalar(operator.rshift, other) def __rrshift__(self, other): return self._scalar(operator.rshift,", "return self._vector(operator.add, other) def __radd__(self, other): return self._vector(operator.add, other, True)", "else: self._scalar(operator.pow, other) # no __rpow__ def __lshift__(self, other): return", "return 1 - awkward.util.numpy.absolute(self.cosdelta(other)) < tolerance def __lt__(self, other): raise", "return self._unary(operator.neg) def __pos__(self): return self._unary(operator.pos) def __abs__(self): return 
self.mag", "other) def __rxor__(self, other): return self._scalar(operator.xor, other, True) def __neg__(self):", "EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import", "other, degrees=False): out = math.acos(self.cosdelta(other)) if degrees: out = out", "WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN", "out = self.dot(other) out[mask] /= denom mask = awkward.util.numpy.logical_not(mask) out[mask]", "< tolerance def __add__(self, other): return self._vector(operator.add, other) def __radd__(self,", "True) def __neg__(self): return self._unary(operator.neg) def __pos__(self): return self._unary(operator.pos) def", "self._vector(operator.sub, other, True) def __mul__(self, other): return self._scalar(operator.mul, other) def", "out + self.y*self.y return out def delta_phi(self, other): return (self.phi", "True) def __truediv__(self, other): return self._scalar(operator.truediv, other) def __rtruediv__(self, other):", "and the following disclaimer. # # * Redistributions in binary", "# contributors may be used to endorse or promote products", "(2*math.pi) - math.pi def isparallel(self, other, tolerance=1e-10): return 1 -", "def delta_phi(self, other): return (self.phi - other.phi + math.pi) %", "out = math.acos(self.cosdelta(other)) if degrees: out = out * 180.0/math.pi", "180.0/awkward.util.numpy.pi) return out def isopposite(self, other, tolerance=1e-10): tmp = self", "OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED", "out def isopposite(self, other, tolerance=1e-10): tmp = self + other", "out * awkward.util.numpy.sign(normal.dot(a.cross(b))) if degrees: out = awkward.util.numpy.multiply(out, 180.0/awkward.util.numpy.pi) return", "= awkward.util.numpy.bitwise_and(out, tmp.z < tolerance) return out def isperpendicular(self, other,", "__add__(self, other): return self._vector(operator.add, other) def __radd__(self, other): return self._vector(operator.add,", "COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT,", "out = out * 180.0/math.pi return out def 
isopposite(self, other,", "# # * Redistributions of source code must retain the", "True) def __sub__(self, other): return self._vector(operator.sub, other) def __rsub__(self, other):", "vectors have no natural ordering\") def __gt__(self, other): raise TypeError(\"spatial", "def __rsub__(self, other): return self._vector(operator.sub, other, True) def __mul__(self, other):", "__rpow__ def __lshift__(self, other): return self._scalar(operator.lshift, other) def __rlshift__(self, other):", "return self._vector(operator.sub, other, True) def __mul__(self, other): return self._scalar(operator.mul, other)", "other): return self._scalar(operator.lshift, other) def __rlshift__(self, other): return self._scalar(operator.lshift, other,", "True) def __xor__(self, other): return self._scalar(operator.xor, other) def __rxor__(self, other):", "other) def __rdiv__(self, other): return self._scalar(operator.div, other, True) def __truediv__(self,", "with or without # modification, are permitted provided that the", "out class Methods(Common): @property def unit(self): return self / self.mag", "def __rand__(self, other): return self._scalar(operator.and_, other, True) def __or__(self, other):", "class Methods(Common): @property def unit(self): return self / self.mag @property", "- awkward.util.numpy.absolute(self.cosdelta(other)) < tolerance def __lt__(self, other): raise TypeError(\"spatial vectors", "return self._scalar(operator.mod, other, True) def __divmod__(self, other): return self._scalar(operator.divmod, other)", "self._vector(operator.add, other) def __radd__(self, other): return self._vector(operator.add, other, True) def", "if other == 2: return self.mag2 else: return self.mag2**(0.5*other) else:", "import awkward.util class Common(object): @property def mag2(self): return self.dot(self) @property", "following disclaimer. 
# # * Redistributions in binary form must", "denom[mask] denom[:] = awkward.util.numpy.sqrt(denom) out = self.dot(other) out[mask] /= denom", "awkward.util class Common(object): @property def mag2(self): return self.dot(self) @property def", "raise TypeError(\"spatial vectors have no natural ordering\") def __ge__(self, other):", "awkward.util.numpy.sign(normal.dot(a.cross(b))) if degrees: out = awkward.util.numpy.multiply(out, 180.0/awkward.util.numpy.pi) return out def", "self._scalar(operator.mod, other) def __rmod__(self, other): return self._scalar(operator.mod, other, True) def", "= self.rho2 return awkward.util.numpy.sqrt(out) @property def phi(self): return awkward.util.numpy.arctan2(self.y, self.x)", "/= denom mask = awkward.util.numpy.logical_not(mask) out[mask] = 1 return awkward.util.numpy.clip(out,", "# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY", "tolerance def iscollinear(self, other, tolerance=1e-10): return 1 - awkward.util.numpy.absolute(self.cosdelta(other)) <", "out def isperpendicular(self, other, tolerance=1e-10): tmp = self.dot(other) tmp.x =", "if degrees: out = out * 180.0/math.pi return out def", "other): return self._scalar(operator.mod, other) def __rmod__(self, other): return self._scalar(operator.mod, other,", "< tolerance and abs(tmp.y) < tolerance and abs(tmp.z) < tolerance", "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES", "self.dot(other) out[mask] /= denom mask = awkward.util.numpy.logical_not(mask) out[mask] = 1", "= self.mag2 * other.mag2 mask = (denom > 0) denom", "= out + self.y*self.y return out def delta_phi(self, other): return", "return (self.phi - other.phi + math.pi) % (2*math.pi) - math.pi", "== 0 or m2 == 0: return 1.0 r =", "# this list of conditions and the following disclaimer in", "ordering\") class ArrayMethods(Common): @property def unit(self): return self / self.mag", "tolerance def isperpendicular(self, other, tolerance=1e-10): tmp = self.dot(other) return abs(tmp.x)", "IN ANY 
WAY OUT OF THE USE # OF THIS", "__or__(self, other): return self._scalar(operator.or_, other) def __ror__(self, other): return self._scalar(operator.or_,", "import numbers import operator import awkward import awkward.util class Common(object):", "# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL", "self.dot(other) / math.sqrt(m1 * m2) return max(-1.0, min(1.0, r)) def", "return self._scalar(operator.or_, other, True) def __xor__(self, other): return self._scalar(operator.xor, other)", "must retain the above copyright notice, this # list of", "prior written permission. # # THIS SOFTWARE IS PROVIDED BY", "other): return self._scalar(operator.div, other, True) def __truediv__(self, other): return self._scalar(operator.truediv,", "def unit(self): return self / self.mag @property def rho(self): out", "met: # # * Redistributions of source code must retain", "= self.unit b = other.unit out = out * awkward.util.numpy.sign(normal.dot(a.cross(b)))", "= 1 return awkward.util.numpy.clip(out, -1, 1) def angle(self, other, normal=None,", "self._scalar(operator.or_, other, True) def __xor__(self, other): return self._scalar(operator.xor, other) def", "__pos__(self): return self._unary(operator.pos) def __abs__(self): return self.mag def __invert__(self): return", "python # Copyright (c) 2018, DIANA-HEP # All rights reserved.", "FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT", "return self.mag2**(0.5*other) else: self._scalar(operator.pow, other) # no __rpow__ def __lshift__(self,", "the following disclaimer in the documentation # and/or other materials", "tolerance) return out class Methods(Common): @property def unit(self): return self", "HOLDERS AND CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS OR", "of conditions and the following disclaimer. 
# # * Redistributions", "degrees=False): out = math.acos(self.cosdelta(other)) if degrees: out = out *", "# * Redistributions in binary form must reproduce the above", "and use in source and binary forms, with or without", "= self.dot(other) out[mask] /= denom mask = awkward.util.numpy.logical_not(mask) out[mask] =", "= awkward.util.numpy.absolute(tmp.x) tmp.y = awkward.util.numpy.absolute(tmp.y) tmp.z = awkward.util.numpy.absolute(tmp.z) out =", "denom[:] = awkward.util.numpy.sqrt(denom) out = self.dot(other) out[mask] /= denom mask", "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE", "= self + other tmp.x = awkward.util.numpy.absolute(tmp.x) tmp.y = awkward.util.numpy.absolute(tmp.y)", "SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR", "the copyright holder nor the names of its # contributors", "awkward.util.numpy.arctan2(self.y, self.x) def cosdelta(self, other): denom = self.mag2 * other.mag2", "= other.mag2 if m1 == 0 or m2 == 0:", "out[mask] /= denom mask = awkward.util.numpy.logical_not(mask) out[mask] = 1 return", "other, tolerance=1e-10): return self.cosdelta(other) - (-1) < tolerance def iscollinear(self,", "copyright holder nor the names of its # contributors may", "iscollinear(self, other, tolerance=1e-10): return 1 - awkward.util.numpy.absolute(self.cosdelta(other)) < tolerance def", "self._scalar(operator.divmod, other, True) def __pow__(self, other): if isinstance(other, (numbers.Number, awkward.util.numpy.number)):", "PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE,", "(denom > 0) denom = denom[mask] denom[:] = awkward.util.numpy.sqrt(denom) out", "other): return self._scalar(operator.xor, other) def __rxor__(self, other): return self._scalar(operator.xor, other,", "< tolerance def isperpendicular(self, other, tolerance=1e-10): tmp = self.dot(other) return", "and abs(tmp.z) < tolerance def isperpendicular(self, other, tolerance=1e-10): tmp =", "__le__(self, other): raise TypeError(\"spatial vectors 
have no natural ordering\") def", "other, True) def __truediv__(self, other): return self._scalar(operator.truediv, other) def __rtruediv__(self,", "conditions and the following disclaimer in the documentation # and/or", "# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS", "# no __rpow__ def __lshift__(self, other): return self._scalar(operator.lshift, other) def", "def __neg__(self): return self._unary(operator.neg) def __pos__(self): return self._unary(operator.pos) def __abs__(self):", "self._scalar(operator.and_, other) def __rand__(self, other): return self._scalar(operator.and_, other, True) def", "awkward.util.numpy.absolute(self.cosdelta(other)) < tolerance def __lt__(self, other): raise TypeError(\"spatial vectors have", "return awkward.util.numpy.sqrt(self.mag2) @property def rho2(self): out = self.x*self.x out =", "True) def __divmod__(self, other): return self._scalar(operator.divmod, other) def __rdivmod__(self, other):", "def unit(self): return self / self.mag @property def rho(self): return", "reproduce the above copyright notice, # this list of conditions", "and abs(tmp.y) < tolerance and abs(tmp.z) < tolerance def __add__(self,", "other, True) def __sub__(self, other): return self._vector(operator.sub, other) def __rsub__(self,", "def __divmod__(self, other): return self._scalar(operator.divmod, other) def __rdivmod__(self, other): return", "* m2) return max(-1.0, min(1.0, r)) def angle(self, other, degrees=False):", "AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT", "return self / self.mag @property def rho(self): return math.sqrt(self.rho2) @property", "in binary form must reproduce the above copyright notice, #", "denom mask = awkward.util.numpy.logical_not(mask) out[mask] = 1 return awkward.util.numpy.clip(out, -1,", "other): return self._scalar(operator.rshift, other) def __rrshift__(self, other): return self._scalar(operator.rshift, other,", "be used to endorse or promote products derived from #", "other.mag2 if m1 == 0 or m2 
== 0: return", "forms, with or without # modification, are permitted provided that", "return math.sqrt(self.rho2) @property def phi(self): return math.atan2(self.y, self.x) def cosdelta(self,", "ARISING IN ANY WAY OUT OF THE USE # OF", "self._scalar(operator.xor, other) def __rxor__(self, other): return self._scalar(operator.xor, other, True) def", "self._scalar(operator.div, other) def __rdiv__(self, other): return self._scalar(operator.div, other, True) def", "binary forms, with or without # modification, are permitted provided", "def isantiparallel(self, other, tolerance=1e-10): return self.cosdelta(other) - (-1) < tolerance", "def __pos__(self): return self._unary(operator.pos) def __abs__(self): return self.mag def __invert__(self):", "# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN", "tolerance def __lt__(self, other): raise TypeError(\"spatial vectors have no natural", "math import numbers import operator import awkward import awkward.util class", "awkward.util.numpy.sqrt(denom) out = self.dot(other) out[mask] /= denom mask = awkward.util.numpy.logical_not(mask)", "m2 == 0: return 1.0 r = self.dot(other) / math.sqrt(m1", "# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR", "isopposite(self, other, tolerance=1e-10): tmp = self + other tmp.x =", "specific prior written permission. 
# # THIS SOFTWARE IS PROVIDED", "contributors may be used to endorse or promote products derived", "import operator import awkward import awkward.util class Common(object): @property def", "def __lt__(self, other): raise TypeError(\"spatial vectors have no natural ordering\")", "0) denom = denom[mask] denom[:] = awkward.util.numpy.sqrt(denom) out = self.dot(other)", "# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE", "< tolerance and abs(tmp.z) < tolerance def __add__(self, other): return", "__and__(self, other): return self._scalar(operator.and_, other) def __rand__(self, other): return self._scalar(operator.and_,", "LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN", "THE POSSIBILITY OF SUCH DAMAGE. import math import numbers import", "class Common(object): @property def mag2(self): return self.dot(self) @property def mag(self):", "vectors have no natural ordering\") def __ge__(self, other): raise TypeError(\"spatial", "return abs(tmp.x) < tolerance and abs(tmp.y) < tolerance and abs(tmp.z)", "provided that the following conditions are met: # # *", "TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF", "def __rtruediv__(self, other): return self._scalar(operator.truediv, other, True) def __floordiv__(self, other):", "- math.pi def isparallel(self, other, tolerance=1e-10): return 1 - self.cosdelta(other)", "the documentation # and/or other materials provided with the distribution.", "tmp = self.dot(other) tmp.x = awkward.util.numpy.absolute(tmp.x) tmp.y = awkward.util.numpy.absolute(tmp.y) tmp.z", "__rand__(self, other): return self._scalar(operator.and_, other, True) def __or__(self, other): return", "return self._scalar(operator.mul, other) def __rmul__(self, other): return self._scalar(operator.mul, other, True)", "__floordiv__(self, other): return self._scalar(operator.floordiv, other) def __rfloordiv__(self, other): return self._scalar(operator.floordiv,", "raise TypeError(\"spatial vectors have no natural 
ordering\") class ArrayMethods(Common): @property", "have no natural ordering\") def __ge__(self, other): raise TypeError(\"spatial vectors", "@property def phi(self): return awkward.util.numpy.arctan2(self.y, self.x) def cosdelta(self, other): denom", "are permitted provided that the following conditions are met: #", "OF SUCH DAMAGE. import math import numbers import operator import", "__divmod__(self, other): return self._scalar(operator.divmod, other) def __rdivmod__(self, other): return self._scalar(operator.divmod,", "above copyright notice, this # list of conditions and the", "normal is not None: a = self.unit b = other.unit", "other, tolerance=1e-10): tmp = self + other return abs(tmp.x) <", "# list of conditions and the following disclaimer. # #", "__sub__(self, other): return self._vector(operator.sub, other) def __rsub__(self, other): return self._vector(operator.sub,", "the name of the copyright holder nor the names of", "natural ordering\") def __ge__(self, other): raise TypeError(\"spatial vectors have no", "self.x) def cosdelta(self, other): m1 = self.mag2 m2 = other.mag2", "mag(self): return awkward.util.numpy.sqrt(self.mag2) @property def rho2(self): out = self.x*self.x out", "LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER", "# All rights reserved. 
# # Redistribution and use in", "other): m1 = self.mag2 m2 = other.mag2 if m1 ==", "True) def __mul__(self, other): return self._scalar(operator.mul, other) def __rmul__(self, other):", "other) def __rmod__(self, other): return self._scalar(operator.mod, other, True) def __divmod__(self,", "phi(self): return awkward.util.numpy.arctan2(self.y, self.x) def cosdelta(self, other): denom = self.mag2", "no natural ordering\") class ArrayMethods(Common): @property def unit(self): return self", "Redistributions in binary form must reproduce the above copyright notice,", "else: return self.mag2**(0.5*other) else: self._scalar(operator.pow, other) # no __rpow__ def", "vectors have no natural ordering\") class ArrayMethods(Common): @property def unit(self):", "unit(self): return self / self.mag @property def rho(self): out =", "= (denom > 0) denom = denom[mask] denom[:] = awkward.util.numpy.sqrt(denom)", "# Redistribution and use in source and binary forms, with", "self.x) def cosdelta(self, other): denom = self.mag2 * other.mag2 mask", "def isperpendicular(self, other, tolerance=1e-10): tmp = self.dot(other) return abs(tmp.x) <", "natural ordering\") def __le__(self, other): raise TypeError(\"spatial vectors have no", "AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. 
IN", "the above copyright notice, # this list of conditions and", "the following conditions are met: # # * Redistributions of", "math.sqrt(self.rho2) @property def phi(self): return math.atan2(self.y, self.x) def cosdelta(self, other):", "out[mask] = 1 return awkward.util.numpy.clip(out, -1, 1) def angle(self, other,", "True) def __floordiv__(self, other): return self._scalar(operator.floordiv, other) def __rfloordiv__(self, other):", "other, tolerance=1e-10): tmp = self.dot(other) tmp.x = awkward.util.numpy.absolute(tmp.x) tmp.y =", "other) def __rand__(self, other): return self._scalar(operator.and_, other, True) def __or__(self,", "other.unit out = out * awkward.util.numpy.sign(normal.dot(a.cross(b))) if degrees: out =", "HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER", "awkward.util.numpy.bitwise_and(out, tmp.y < tolerance) out = awkward.util.numpy.bitwise_and(out, tmp.z < tolerance)", "awkward.util.numpy.clip(out, -1, 1) def angle(self, other, normal=None, degrees=False): out =", "return 1.0 r = self.dot(other) / math.sqrt(m1 * m2) return", "other): return self._scalar(operator.or_, other) def __ror__(self, other): return self._scalar(operator.or_, other,", "def rho(self): return math.sqrt(self.rho2) @property def phi(self): return math.atan2(self.y, self.x)", "other): return self._scalar(operator.floordiv, other, True) def __mod__(self, other): return self._scalar(operator.mod,", "other return abs(tmp.x) < tolerance and abs(tmp.y) < tolerance and", "other): return self._scalar(operator.mod, other, True) def __divmod__(self, other): return self._scalar(operator.divmod,", "< tolerance and abs(tmp.z) < tolerance def isperpendicular(self, other, tolerance=1e-10):", "< tolerance def isantiparallel(self, other, tolerance=1e-10): return self.cosdelta(other) - (-1)", "list of conditions and the following disclaimer in the documentation", "return 1 - self.cosdelta(other) < tolerance def isantiparallel(self, other, tolerance=1e-10):", "def isopposite(self, 
other, tolerance=1e-10): tmp = self + other return", "* Redistributions of source code must retain the above copyright", "isinstance(other, (numbers.Number, awkward.util.numpy.number)): if other == 2: return self.mag2 else:", "other == 2: return self.mag2 else: return self.mag2**(0.5*other) else: self._scalar(operator.pow,", "self.mag2**(0.5*other) else: self._scalar(operator.pow, other) # no __rpow__ def __lshift__(self, other):", "# modification, are permitted provided that the following conditions are", "mask = (denom > 0) denom = denom[mask] denom[:] =", "the following disclaimer. # # * Redistributions in binary form", "other): return self._scalar(operator.lshift, other, True) def __rshift__(self, other): return self._scalar(operator.rshift,", "return self._scalar(operator.rshift, other, True) def __and__(self, other): return self._scalar(operator.and_, other)", "DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS", "following disclaimer in the documentation # and/or other materials provided", "= self.dot(other) / math.sqrt(m1 * m2) return max(-1.0, min(1.0, r))", "other): return self._scalar(operator.divmod, other) def __rdivmod__(self, other): return self._scalar(operator.divmod, other,", "FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. 
IN NO", "return math.atan2(self.y, self.x) def cosdelta(self, other): m1 = self.mag2 m2", "self.dot(self) @property def mag(self): return awkward.util.numpy.sqrt(self.mag2) @property def rho2(self): out", "return max(-1.0, min(1.0, r)) def angle(self, other, degrees=False): out =", "math.pi) % (2*math.pi) - math.pi def isparallel(self, other, tolerance=1e-10): return", "have no natural ordering\") def __le__(self, other): raise TypeError(\"spatial vectors", "other tmp.x = awkward.util.numpy.absolute(tmp.x) tmp.y = awkward.util.numpy.absolute(tmp.y) tmp.z = awkward.util.numpy.absolute(tmp.z)", "self.dot(other) return abs(tmp.x) < tolerance and abs(tmp.y) < tolerance and", "may be used to endorse or promote products derived from", "return self._scalar(operator.floordiv, other, True) def __mod__(self, other): return self._scalar(operator.mod, other)", "def __ge__(self, other): raise TypeError(\"spatial vectors have no natural ordering\")", "self.mag2 * other.mag2 mask = (denom > 0) denom =", "self.mag2 m2 = other.mag2 if m1 == 0 or m2", "tmp.y = awkward.util.numpy.absolute(tmp.y) tmp.z = awkward.util.numpy.absolute(tmp.z) out = (tmp.x <", "= self.x*self.x out = out + self.y*self.y return out def", "tmp.z < tolerance) return out def isperpendicular(self, other, tolerance=1e-10): tmp", "# # * Redistributions in binary form must reproduce the", "return self.mag2 else: return self.mag2**(0.5*other) else: self._scalar(operator.pow, other) # no", "return self._vector(operator.sub, other) def __rsub__(self, other): return self._vector(operator.sub, other, True)", "return self._scalar(operator.div, other, True) def __truediv__(self, other): return self._scalar(operator.truediv, other)", "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED", "or promote products derived from # this software without specific", "NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE", "awkward.util.numpy.bitwise_and(out, tmp.z < tolerance) return out class Methods(Common): @property 
def", "isperpendicular(self, other, tolerance=1e-10): tmp = self.dot(other) tmp.x = awkward.util.numpy.absolute(tmp.x) tmp.y", "OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE", "def __radd__(self, other): return self._vector(operator.add, other, True) def __sub__(self, other):", "copyright notice, # this list of conditions and the following", "- (-1) < tolerance def iscollinear(self, other, tolerance=1e-10): return 1", "degrees: out = awkward.util.numpy.multiply(out, 180.0/awkward.util.numpy.pi) return out def isopposite(self, other,", "def __sub__(self, other): return self._vector(operator.sub, other) def __rsub__(self, other): return", "rho2(self): out = self.x*self.x out = out + self.y*self.y return", "def __le__(self, other): raise TypeError(\"spatial vectors have no natural ordering\")", "def __truediv__(self, other): return self._scalar(operator.truediv, other) def __rtruediv__(self, other): return", "return self._scalar(operator.or_, other) def __ror__(self, other): return self._scalar(operator.or_, other, True)", "self._unary(operator.neg) def __pos__(self): return self._unary(operator.pos) def __abs__(self): return self.mag def", "return self._scalar(operator.lshift, other) def __rlshift__(self, other): return self._scalar(operator.lshift, other, True)", "OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA,", "tolerance=1e-10): tmp = self.dot(other) return abs(tmp.x) < tolerance and abs(tmp.y)", "ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES", "following conditions are met: # # * Redistributions of source", "self._scalar(operator.div, other, True) def __truediv__(self, other): return self._scalar(operator.truediv, other) def", "b = other.unit out = out * awkward.util.numpy.sign(normal.dot(a.cross(b))) if degrees:", "def __lshift__(self, other): return self._scalar(operator.lshift, other) def __rlshift__(self, other): return", "import math import numbers import operator import awkward import awkward.util", "the names of 
its # contributors may be used to", "INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF", "* Neither the name of the copyright holder nor the", "tmp.x = awkward.util.numpy.absolute(tmp.x) tmp.y = awkward.util.numpy.absolute(tmp.y) tmp.z = awkward.util.numpy.absolute(tmp.z) out", "OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL,", "other): return (self.phi - other.phi + math.pi) % (2*math.pi) -", "# Copyright (c) 2018, DIANA-HEP # All rights reserved. #", "the above copyright notice, this # list of conditions and", "and the following disclaimer in the documentation # and/or other", "# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", "promote products derived from # this software without specific prior", "other): return self._scalar(operator.rshift, other, True) def __and__(self, other): return self._scalar(operator.and_,", "def rho(self): out = self.rho2 return awkward.util.numpy.sqrt(out) @property def phi(self):", "conditions and the following disclaimer. # # * Redistributions in", "< tolerance) out = awkward.util.numpy.bitwise_and(out, tmp.z < tolerance) return out", "other) def __radd__(self, other): return self._vector(operator.add, other, True) def __sub__(self,", "rho(self): return math.sqrt(self.rho2) @property def phi(self): return math.atan2(self.y, self.x) def", "no __rpow__ def __lshift__(self, other): return self._scalar(operator.lshift, other) def __rlshift__(self,", "other): return self._scalar(operator.or_, other, True) def __xor__(self, other): return self._scalar(operator.xor,", "out = (tmp.x < tolerance) out = awkward.util.numpy.bitwise_and(out, tmp.y <", "IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import math", "m2 = other.mag2 if m1 == 0 or m2 ==", "def angle(self, other, normal=None, degrees=False): out = awkward.util.numpy.arccos(self.cosdelta(other)) if normal", "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS", "== 0: return 1.0 r = self.dot(other) / math.sqrt(m1 *", "LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", "1.0 r = self.dot(other) / math.sqrt(m1 * m2) return max(-1.0,", "tolerance def isantiparallel(self, other, tolerance=1e-10): return self.cosdelta(other) - (-1) <", "self.mag @property def rho(self): out = self.rho2 return awkward.util.numpy.sqrt(out) @property", "out * 180.0/math.pi return out def isopposite(self, other, tolerance=1e-10): tmp", "other): return self._scalar(operator.floordiv, other) def __rfloordiv__(self, other): return self._scalar(operator.floordiv, other,", "All rights reserved. # # Redistribution and use in source", "tolerance=1e-10): tmp = self + other return abs(tmp.x) < tolerance", "other, tolerance=1e-10): return 1 - awkward.util.numpy.absolute(self.cosdelta(other)) < tolerance def __lt__(self,", "other, tolerance=1e-10): tmp = self.dot(other) return abs(tmp.x) < tolerance and", "without # modification, are permitted provided that the following conditions", "CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF", "other): raise TypeError(\"spatial vectors have no natural ordering\") def __le__(self,", "normal=None, degrees=False): out = awkward.util.numpy.arccos(self.cosdelta(other)) if normal is not None:", "def mag2(self): return self.dot(self) @property def mag(self): return awkward.util.numpy.sqrt(self.mag2) @property", "class ArrayMethods(Common): @property def unit(self): return self / self.mag @property", "this list of conditions and the following disclaimer in the", "used to endorse or promote products derived from # this", "PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY", "other) def __rdivmod__(self, other): return 
self._scalar(operator.divmod, other, True) def __pow__(self,", "m1 = self.mag2 m2 = other.mag2 if m1 == 0", "modification, are permitted provided that the following conditions are met:", "IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR", "THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF", "delta_phi(self, other): return (self.phi - other.phi + math.pi) % (2*math.pi)", "< tolerance) return out class Methods(Common): @property def unit(self): return", "other, True) def __floordiv__(self, other): return self._scalar(operator.floordiv, other) def __rfloordiv__(self,", "== 2: return self.mag2 else: return self.mag2**(0.5*other) else: self._scalar(operator.pow, other)", "TypeError(\"spatial vectors have no natural ordering\") def __gt__(self, other): raise", "1 - awkward.util.numpy.absolute(self.cosdelta(other)) < tolerance def __lt__(self, other): raise TypeError(\"spatial", "of the copyright holder nor the names of its #", "def __mul__(self, other): return self._scalar(operator.mul, other) def __rmul__(self, other): return", "TypeError(\"spatial vectors have no natural ordering\") def __ge__(self, other): raise", "tolerance and abs(tmp.y) < tolerance and abs(tmp.z) < tolerance def", "other, True) def __mod__(self, other): return self._scalar(operator.mod, other) def __rmod__(self,", "awkward.util.numpy.arccos(self.cosdelta(other)) if normal is not None: a = self.unit b", "self._scalar(operator.mul, other, True) def __div__(self, other): return self._scalar(operator.div, other) def", "DIANA-HEP # All rights reserved. 
# # Redistribution and use", "self._scalar(operator.rshift, other) def __rrshift__(self, other): return self._scalar(operator.rshift, other, True) def", "def angle(self, other, degrees=False): out = math.acos(self.cosdelta(other)) if degrees: out", "self + other return abs(tmp.x) < tolerance and abs(tmp.y) <", "tmp = self + other tmp.x = awkward.util.numpy.absolute(tmp.x) tmp.y =", "def isopposite(self, other, tolerance=1e-10): tmp = self + other tmp.x", "def __rrshift__(self, other): return self._scalar(operator.rshift, other, True) def __and__(self, other):", "def cosdelta(self, other): denom = self.mag2 * other.mag2 mask =", "None: a = self.unit b = other.unit out = out", "def phi(self): return math.atan2(self.y, self.x) def cosdelta(self, other): m1 =", "tmp = self + other return abs(tmp.x) < tolerance and", "def __add__(self, other): return self._vector(operator.add, other) def __radd__(self, other): return", "reserved. # # Redistribution and use in source and binary", "and abs(tmp.y) < tolerance and abs(tmp.z) < tolerance def isperpendicular(self,", "self._scalar(operator.truediv, other, True) def __floordiv__(self, other): return self._scalar(operator.floordiv, other) def", "= awkward.util.numpy.multiply(out, 180.0/awkward.util.numpy.pi) return out def isopposite(self, other, tolerance=1e-10): tmp", "operator import awkward import awkward.util class Common(object): @property def mag2(self):", "1) def angle(self, other, normal=None, degrees=False): out = awkward.util.numpy.arccos(self.cosdelta(other)) if", "= self + other return abs(tmp.x) < tolerance and abs(tmp.y)", "self._scalar(operator.and_, other, True) def __or__(self, other): return self._scalar(operator.or_, other) def", "= other.unit out = out * awkward.util.numpy.sign(normal.dot(a.cross(b))) if degrees: out", "other): return self._scalar(operator.and_, other) def __rand__(self, other): return self._scalar(operator.and_, other,", "self._scalar(operator.floordiv, other) def __rfloordiv__(self, 
other): return self._scalar(operator.floordiv, other, True) def", "* 180.0/math.pi return out def isopposite(self, other, tolerance=1e-10): tmp =", "self._scalar(operator.lshift, other) def __rlshift__(self, other): return self._scalar(operator.lshift, other, True) def", "< tolerance def __lt__(self, other): raise TypeError(\"spatial vectors have no", "return self / self.mag @property def rho(self): out = self.rho2", "self + other tmp.x = awkward.util.numpy.absolute(tmp.x) tmp.y = awkward.util.numpy.absolute(tmp.y) tmp.z", "self.cosdelta(other) - (-1) < tolerance def iscollinear(self, other, tolerance=1e-10): return", "def __rmod__(self, other): return self._scalar(operator.mod, other, True) def __divmod__(self, other):", "other, True) def __divmod__(self, other): return self._scalar(operator.divmod, other) def __rdivmod__(self,", "# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "True) def __mod__(self, other): return self._scalar(operator.mod, other) def __rmod__(self, other):", "# * Redistributions of source code must retain the above", "def __ror__(self, other): return self._scalar(operator.or_, other, True) def __xor__(self, other):", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE #", "natural ordering\") def __gt__(self, other): raise TypeError(\"spatial vectors have no", "the distribution. 
# # * Neither the name of the", "@property def rho2(self): out = self.x*self.x out = out +", "denom = self.mag2 * other.mag2 mask = (denom > 0)", "IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"", "def iscollinear(self, other, tolerance=1e-10): return 1 - awkward.util.numpy.absolute(self.cosdelta(other)) < tolerance", "angle(self, other, degrees=False): out = math.acos(self.cosdelta(other)) if degrees: out =", "def __pow__(self, other): if isinstance(other, (numbers.Number, awkward.util.numpy.number)): if other ==", "denom = denom[mask] denom[:] = awkward.util.numpy.sqrt(denom) out = self.dot(other) out[mask]", "return self._scalar(operator.rshift, other) def __rrshift__(self, other): return self._scalar(operator.rshift, other, True)", "= awkward.util.numpy.sqrt(denom) out = self.dot(other) out[mask] /= denom mask =", "holder nor the names of its # contributors may be", "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE #", "permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", "STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING", "out = awkward.util.numpy.multiply(out, 180.0/awkward.util.numpy.pi) return out def isopposite(self, other, tolerance=1e-10):", "math.sqrt(m1 * m2) return max(-1.0, min(1.0, r)) def angle(self, other,", "other) def __rfloordiv__(self, other): return self._scalar(operator.floordiv, other, True) def __mod__(self,", "0 or m2 == 0: return 1.0 r = self.dot(other)", "to endorse or promote products derived from # this software", "+ other return abs(tmp.x) < tolerance and abs(tmp.y) < tolerance", "NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND", "ordering\") def __le__(self, other): raise TypeError(\"spatial vectors have no natural", "/ math.sqrt(m1 * m2) return max(-1.0, min(1.0, r)) def angle(self,", "@property def mag(self): return awkward.util.numpy.sqrt(self.mag2) @property def rho2(self): out =", "__rmul__(self, other): return self._scalar(operator.mul, other, True) def 
__div__(self, other): return", "return out def delta_phi(self, other): return (self.phi - other.phi +", "def rho2(self): out = self.x*self.x out = out + self.y*self.y", "ordering\") def __gt__(self, other): raise TypeError(\"spatial vectors have no natural", "abs(tmp.y) < tolerance and abs(tmp.z) < tolerance def isperpendicular(self, other,", "USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED", "BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY", "its # contributors may be used to endorse or promote", "INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY,", "180.0/math.pi return out def isopposite(self, other, tolerance=1e-10): tmp = self", "out = self.x*self.x out = out + self.y*self.y return out", "IS\" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT", "INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT", "other, tolerance=1e-10): return 1 - self.cosdelta(other) < tolerance def isantiparallel(self,", "other): raise TypeError(\"spatial vectors have no natural ordering\") class ArrayMethods(Common):", "return self._scalar(operator.divmod, other) def __rdivmod__(self, other): return self._scalar(operator.divmod, other, True)", "OTHERWISE) ARISING IN ANY WAY OUT OF THE USE #", "tmp = self.dot(other) return abs(tmp.x) < tolerance and abs(tmp.y) <", "SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS", "def __and__(self, other): return self._scalar(operator.and_, other) def __rand__(self, other): return", "tolerance and abs(tmp.z) < tolerance def __add__(self, other): return self._vector(operator.add,", "other): return self._scalar(operator.truediv, other, True) def __floordiv__(self, other): return self._scalar(operator.floordiv,", "of its # contributors may be used to endorse or", "= denom[mask] denom[:] = awkward.util.numpy.sqrt(denom) out = self.dot(other) out[mask] /=", "tolerance) out = awkward.util.numpy.bitwise_and(out, tmp.y < tolerance) out = 
awkward.util.numpy.bitwise_and(out,", "# # * Neither the name of the copyright holder", "THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A", "return self._scalar(operator.floordiv, other) def __rfloordiv__(self, other): return self._scalar(operator.floordiv, other, True)", "def cosdelta(self, other): m1 = self.mag2 m2 = other.mag2 if", "COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS", "def isperpendicular(self, other, tolerance=1e-10): tmp = self.dot(other) tmp.x = awkward.util.numpy.absolute(tmp.x)", "return self.dot(self) @property def mag(self): return awkward.util.numpy.sqrt(self.mag2) @property def rho2(self):", "return self._scalar(operator.lshift, other, True) def __rshift__(self, other): return self._scalar(operator.rshift, other)", "@property def rho(self): return math.sqrt(self.rho2) @property def phi(self): return math.atan2(self.y,", "__xor__(self, other): return self._scalar(operator.xor, other) def __rxor__(self, other): return self._scalar(operator.xor,", "return self._scalar(operator.div, other) def __rdiv__(self, other): return self._scalar(operator.div, other, True)", "def __rlshift__(self, other): return self._scalar(operator.lshift, other, True) def __rshift__(self, other):", "= awkward.util.numpy.arccos(self.cosdelta(other)) if normal is not None: a = self.unit", "def mag(self): return awkward.util.numpy.sqrt(self.mag2) @property def rho2(self): out = self.x*self.x", "self._scalar(operator.floordiv, other, True) def __mod__(self, other): return self._scalar(operator.mod, other) def", "derived from # this software without specific prior written permission.", "+ other tmp.x = awkward.util.numpy.absolute(tmp.x) tmp.y = awkward.util.numpy.absolute(tmp.y) tmp.z =", "awkward.util.numpy.absolute(tmp.y) tmp.z = awkward.util.numpy.absolute(tmp.z) out = (tmp.x < tolerance) out", "@property def phi(self): return math.atan2(self.y, self.x) def cosdelta(self, other): m1", "__div__(self, other): return self._scalar(operator.div, 
other) def __rdiv__(self, other): return self._scalar(operator.div,", "mag2(self): return self.dot(self) @property def mag(self): return awkward.util.numpy.sqrt(self.mag2) @property def", "angle(self, other, normal=None, degrees=False): out = awkward.util.numpy.arccos(self.cosdelta(other)) if normal is", "TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT", "tolerance=1e-10): return 1 - awkward.util.numpy.absolute(self.cosdelta(other)) < tolerance def __lt__(self, other):", "self._scalar(operator.divmod, other) def __rdivmod__(self, other): return self._scalar(operator.divmod, other, True) def", "__rmod__(self, other): return self._scalar(operator.mod, other, True) def __divmod__(self, other): return", "ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,", "conditions are met: # # * Redistributions of source code", "PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT", "THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY", "cosdelta(self, other): m1 = self.mag2 m2 = other.mag2 if m1", "self.unit b = other.unit out = out * awkward.util.numpy.sign(normal.dot(a.cross(b))) if", "if m1 == 0 or m2 == 0: return 1.0", "have no natural ordering\") def __gt__(self, other): raise TypeError(\"spatial vectors", "ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import math import", "OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER #", "(self.phi - other.phi + math.pi) % (2*math.pi) - math.pi def", "if isinstance(other, (numbers.Number, awkward.util.numpy.number)): if other == 2: return self.mag2", "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED", "BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR #", "2018, DIANA-HEP # All rights reserved. 
# # Redistribution and", "other, normal=None, degrees=False): out = awkward.util.numpy.arccos(self.cosdelta(other)) if normal is not", "out = awkward.util.numpy.bitwise_and(out, tmp.z < tolerance) return out def isperpendicular(self,", "not None: a = self.unit b = other.unit out =", "other, True) def __and__(self, other): return self._scalar(operator.and_, other) def __rand__(self,", "vectors have no natural ordering\") def __le__(self, other): raise TypeError(\"spatial", "tmp.y < tolerance) out = awkward.util.numpy.bitwise_and(out, tmp.z < tolerance) return", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" # AND", "< tolerance) out = awkward.util.numpy.bitwise_and(out, tmp.y < tolerance) out =", "other, True) def __neg__(self): return self._unary(operator.neg) def __pos__(self): return self._unary(operator.pos)", "<gh_stars>0 #!/usr/bin/env python # Copyright (c) 2018, DIANA-HEP # All", "* awkward.util.numpy.sign(normal.dot(a.cross(b))) if degrees: out = awkward.util.numpy.multiply(out, 180.0/awkward.util.numpy.pi) return out", "Neither the name of the copyright holder nor the names", "is not None: a = self.unit b = other.unit out", "EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO," ]
[ "self.pages: while True: eop = page.is_eop() self.vis.visualize(page.next_line(), eop) if eop:", "TPP souce code into another format) \"\"\" from tpp.FileParser import", "code into another format) \"\"\" def __init__(self, input_file, output, visualizer_class):", "non-interactive visualizers. (i.e. those that are used for converting TPP", "\"\"\" def __init__(self, input_file, output, visualizer_class): \"\"\" Todo: ApiDoc. :rtype:", "page.is_eop() self.vis.visualize(page.next_line(), eop) if eop: break def close(self): \"\"\" Todo:", "input: :param output: :param visualizer_class: \"\"\" super(ConversionController, self).__init__() parser =", "self.pages = parser.get_pages() self.vis = visualizer_class(output) def run(self): \"\"\" Todo:", "output: :param visualizer_class: \"\"\" super(ConversionController, self).__init__() parser = FileParser(input_file) self.pages", "for converting TPP souce code into another format) \"\"\" from", "visualizer_class: \"\"\" super(ConversionController, self).__init__() parser = FileParser(input_file) self.pages = parser.get_pages()", "for page in self.pages: while True: eop = page.is_eop() self.vis.visualize(page.next_line(),", "Implements a non interactive controller to run non-interactive visualizers. (i.e.", "__init__(self, input_file, output, visualizer_class): \"\"\" Todo: ApiDoc. :rtype: :param input:", "run(self): \"\"\" Todo: ApiDoc. :return: \"\"\" for page in self.pages:", "controller to run non-interactive visualizers. (i.e. 
those that are used", "TPP source code into another format) \"\"\" def __init__(self, input_file,", ":rtype: :param input: :param output: :param visualizer_class: \"\"\" super(ConversionController, self).__init__()", "another format) \"\"\" def __init__(self, input_file, output, visualizer_class): \"\"\" Todo:", "format) \"\"\" def __init__(self, input_file, output, visualizer_class): \"\"\" Todo: ApiDoc.", "that are used for converting TPP souce code into another", "self).__init__() parser = FileParser(input_file) self.pages = parser.get_pages() self.vis = visualizer_class(output)", "tpp.FileParser import FileParser from tpp.controller.TPPController import TPPController class ConversionController(TPPController): \"\"\"", "class ConversionController(TPPController): \"\"\" Implements a non interactive controller to run", "that are used for converting TPP source code into another", "parser.get_pages() self.vis = visualizer_class(output) def run(self): \"\"\" Todo: ApiDoc. :return:", "while True: eop = page.is_eop() self.vis.visualize(page.next_line(), eop) if eop: break", "interactive controller to controt non-interactive visualizers. (i.e. those that are", "visualizers. (i.e. those that are used for converting TPP source", "are used for converting TPP souce code into another format)", "are used for converting TPP source code into another format)", "visualizer_class(output) def run(self): \"\"\" Todo: ApiDoc. :return: \"\"\" for page", ":param visualizer_class: \"\"\" super(ConversionController, self).__init__() parser = FileParser(input_file) self.pages =", "interactive controller to run non-interactive visualizers. (i.e. those that are", "FileParser(input_file) self.pages = parser.get_pages() self.vis = visualizer_class(output) def run(self): \"\"\"", "TPPController class ConversionController(TPPController): \"\"\" Implements a non interactive controller to", "if eop: break def close(self): \"\"\" Todo: ApiDoc. 
:return: \"\"\"", "source code into another format) \"\"\" def __init__(self, input_file, output,", "\"\"\" from tpp.FileParser import FileParser from tpp.controller.TPPController import TPPController class", ":return: \"\"\" for page in self.pages: while True: eop =", "souce code into another format) \"\"\" from tpp.FileParser import FileParser", "\"\"\" Todo: ApiDoc. :rtype: :param input: :param output: :param visualizer_class:", "to run non-interactive visualizers. (i.e. those that are used for", "ApiDoc. :rtype: :param input: :param output: :param visualizer_class: \"\"\" super(ConversionController,", "run non-interactive visualizers. (i.e. those that are used for converting", "format) \"\"\" from tpp.FileParser import FileParser from tpp.controller.TPPController import TPPController", "Implements a non interactive controller to controt non-interactive visualizers. (i.e.", "converting TPP souce code into another format) \"\"\" from tpp.FileParser", "import TPPController class ConversionController(TPPController): \"\"\" Implements a non interactive controller", "into another format) \"\"\" from tpp.FileParser import FileParser from tpp.controller.TPPController", "non interactive controller to run non-interactive visualizers. (i.e. those that", "from tpp.FileParser import FileParser from tpp.controller.TPPController import TPPController class ConversionController(TPPController):", "converting TPP source code into another format) \"\"\" def __init__(self,", "controt non-interactive visualizers. (i.e. those that are used for converting", "those that are used for converting TPP souce code into", "True: eop = page.is_eop() self.vis.visualize(page.next_line(), eop) if eop: break def", "= FileParser(input_file) self.pages = parser.get_pages() self.vis = visualizer_class(output) def run(self):", "eop) if eop: break def close(self): \"\"\" Todo: ApiDoc. :return:", "ApiDoc. 
:return: \"\"\" for page in self.pages: while True: eop", "code into another format) \"\"\" from tpp.FileParser import FileParser from", "FileParser from tpp.controller.TPPController import TPPController class ConversionController(TPPController): \"\"\" Implements a", "another format) \"\"\" from tpp.FileParser import FileParser from tpp.controller.TPPController import", "controller to controt non-interactive visualizers. (i.e. those that are used", "parser = FileParser(input_file) self.pages = parser.get_pages() self.vis = visualizer_class(output) def", "to controt non-interactive visualizers. (i.e. those that are used for", "self.vis = visualizer_class(output) def run(self): \"\"\" Todo: ApiDoc. :return: \"\"\"", "ConversionController(TPPController): \"\"\" Implements a non interactive controller to run non-interactive", "(i.e. those that are used for converting TPP source code", "\"\"\" super(ConversionController, self).__init__() parser = FileParser(input_file) self.pages = parser.get_pages() self.vis", "def run(self): \"\"\" Todo: ApiDoc. :return: \"\"\" for page in", "= page.is_eop() self.vis.visualize(page.next_line(), eop) if eop: break def close(self): \"\"\"", "used for converting TPP souce code into another format) \"\"\"", "non interactive controller to controt non-interactive visualizers. (i.e. those that", "a non interactive controller to controt non-interactive visualizers. (i.e. those", "visualizers. (i.e. those that are used for converting TPP souce", "visualizer_class): \"\"\" Todo: ApiDoc. :rtype: :param input: :param output: :param", "Todo: ApiDoc. :rtype: :param input: :param output: :param visualizer_class: \"\"\"", "used for converting TPP source code into another format) \"\"\"", "\"\"\" Todo: ApiDoc. :return: \"\"\" for page in self.pages: while", "(i.e. 
those that are used for converting TPP souce code", "super(ConversionController, self).__init__() parser = FileParser(input_file) self.pages = parser.get_pages() self.vis =", "\"\"\" for page in self.pages: while True: eop = page.is_eop()", "a non interactive controller to run non-interactive visualizers. (i.e. those", "those that are used for converting TPP source code into", "= parser.get_pages() self.vis = visualizer_class(output) def run(self): \"\"\" Todo: ApiDoc.", "\"\"\" Implements a non interactive controller to controt non-interactive visualizers.", "def __init__(self, input_file, output, visualizer_class): \"\"\" Todo: ApiDoc. :rtype: :param", ":param input: :param output: :param visualizer_class: \"\"\" super(ConversionController, self).__init__() parser", "in self.pages: while True: eop = page.is_eop() self.vis.visualize(page.next_line(), eop) if", "into another format) \"\"\" def __init__(self, input_file, output, visualizer_class): \"\"\"", "page in self.pages: while True: eop = page.is_eop() self.vis.visualize(page.next_line(), eop)", "Todo: ApiDoc. :return: \"\"\" for page in self.pages: while True:", "\"\"\" Implements a non interactive controller to run non-interactive visualizers.", "import FileParser from tpp.controller.TPPController import TPPController class ConversionController(TPPController): \"\"\" Implements", "for converting TPP source code into another format) \"\"\" def", "self.vis.visualize(page.next_line(), eop) if eop: break def close(self): \"\"\" Todo: ApiDoc.", "eop: break def close(self): \"\"\" Todo: ApiDoc. :return: \"\"\" self.vis.close()", ":param output: :param visualizer_class: \"\"\" super(ConversionController, self).__init__() parser = FileParser(input_file)", "input_file, output, visualizer_class): \"\"\" Todo: ApiDoc. 
:rtype: :param input: :param", "eop = page.is_eop() self.vis.visualize(page.next_line(), eop) if eop: break def close(self):", "from tpp.controller.TPPController import TPPController class ConversionController(TPPController): \"\"\" Implements a non", "output, visualizer_class): \"\"\" Todo: ApiDoc. :rtype: :param input: :param output:", "tpp.controller.TPPController import TPPController class ConversionController(TPPController): \"\"\" Implements a non interactive", "= visualizer_class(output) def run(self): \"\"\" Todo: ApiDoc. :return: \"\"\" for" ]
[ "= tree.find('tr', id='LC34').find_all('td') country_code = html_content[1] date_test = html_content[3].text if", "'Content-type': 'application/json', } data = json.dumps({\"date\": today, \"daily_test\": tests}) response_tests", "= requests.post( self.api_test_url, headers=headers, data=data) return response_tests.json() def call_api_put_data(self, today,", "{\"date\": today, \"cases\": covid_data[0], \"deaths\": covid_data[1]}) sum_data = json.dumps( {\"sum_cases\":", "= self.call_api_put_data( self.today, self.covid_data, self.summary_data) data.append(post_daily_and_sum_covid_data) if soup_test_page: tests_data =", "'div', class_='maincounter-number') for item in range(len(all_cases_covidgr_html_content)): regex = r'(\\n)|\\s' all_cases_data", "= { 'Content-type': 'application/json', } data = json.dumps( {\"date\": today,", "response = requests.post(self.api_url, headers=headers, data=data) response_sum = requests.put( self.api_sum_url, headers=headers,", "if tests_data[0]: post_daily_tests_covid_data = self.call_api_post_tested_covid_data( tests_data[0], tests_data[1]) data.append(post_daily_tests_covid_data) return data", "return soup def scrape_page_content_contains_tests(self): page = requests.get(self.scrape_tests_url) soup = BeautifulSoup(page.content,", "get_daily_data(self, soup): covid_data = [] daily_covidgr_html_content = soup.find('li', class_='news_li') get_daily_covidgr_text", "self.api_test_url, headers=headers, data=data) return response_tests.json() def call_api_put_data(self, today, covid_data, summary_data):", "= { 'Content-type': 'application/json', } data = json.dumps({\"date\": today, \"daily_test\":", "tests): headers = { 'Content-type': 'application/json', } data = json.dumps({\"date\":", "get_daily_covidgr_text = daily_covidgr_html_content.text for elem in get_daily_covidgr_text.split(): regex = '\\d*(.|)\\d+'", "[date_test, today_tests] def call_api_post_tested_covid_data(self, today, tests): headers = { 'Content-type':", 
"self.summary_data = summary_data def get_tests_per_day(self, tree): html_content = tree.find('tr', id='LC34').find_all('td')", "def get_tests_per_day(self, tree): html_content = tree.find('tr', id='LC34').find_all('td') country_code = html_content[1]", "self.scrape_tests_url = 'https://github.com/owid/covid-19-data/blob/master/public/data/testing/covid-testing-latest-data-source-details.csv' self.today = '' self.covid_data = [] self.summary_data=", "import json class CovidScraper: def __init__(self): self.api_url = 'http://127.0.0.1:5000/covidgr' self.api_sum_url", "response_tests = requests.post( self.api_test_url, headers=headers, data=data) return response_tests.json() def call_api_put_data(self,", "post_daily_and_sum_covid_data = self.call_api_put_data( self.today, self.covid_data, self.summary_data) data.append(post_daily_and_sum_covid_data) if soup_test_page: tests_data", "} data = json.dumps( {\"date\": today, \"cases\": covid_data[0], \"deaths\": covid_data[1]})", "all_cases_covidgr_html_content = soup.find_all( 'div', class_='maincounter-number') for item in range(len(all_cases_covidgr_html_content)): regex", "lxml import html import requests import re import json class", "requests import re import json class CovidScraper: def __init__(self): self.api_url", "'https://www.worldometers.info/coronavirus/country/greece/' self.scrape_tests_url = 'https://github.com/owid/covid-19-data/blob/master/public/data/testing/covid-testing-latest-data-source-details.csv' self.today = '' self.covid_data = []", "= self.scrape_page_content() soup_test_page = self.scrape_page_content_contains_tests() if soup: self.get_daily_data(soup) self.get_summary_data(soup) if", "daily_covidgr_html_content = soup.find('li', class_='news_li') get_daily_covidgr_text = daily_covidgr_html_content.text for elem in", "= html_content[3].text if country_code.text == 'GRC': today_tests = html_content[10].text total_tests", "= 'http://127.0.0.1:5000/covidgr/tests' self.scrape_url = 
'https://www.worldometers.info/coronavirus/country/greece/' self.scrape_tests_url = 'https://github.com/owid/covid-19-data/blob/master/public/data/testing/covid-testing-latest-data-source-details.csv' self.today =", "self.scrape_page_content() soup_test_page = self.scrape_page_content_contains_tests() if soup: self.get_daily_data(soup) self.get_summary_data(soup) if self.summary_data", "= 'http://127.0.0.1:5000/summary/covidgr' self.api_test_url = 'http://127.0.0.1:5000/covidgr/tests' self.scrape_url = 'https://www.worldometers.info/coronavirus/country/greece/' self.scrape_tests_url =", "import BeautifulSoup from datetime import date from lxml import html", "self.get_daily_data(soup) self.get_summary_data(soup) if self.summary_data and self.covid_data: post_daily_and_sum_covid_data = self.call_api_put_data( self.today,", "scrape_data(self): data = [] self.today = str(date.today()) soup = self.scrape_page_content()", "= html_content[8].text return [date_test, today_tests] def call_api_post_tested_covid_data(self, today, tests): headers", "{ 'Content-type': 'application/json', } data = json.dumps({\"date\": today, \"daily_test\": tests})", "return response_tests.json() def call_api_put_data(self, today, covid_data, summary_data): headers = {", "__init__(self): self.api_url = 'http://127.0.0.1:5000/covidgr' self.api_sum_url = 'http://127.0.0.1:5000/summary/covidgr' self.api_test_url = 'http://127.0.0.1:5000/covidgr/tests'", "return data def scrape_page_content(self): page = requests.get(self.scrape_url) soup = BeautifulSoup(page.content,", "tests}) response_tests = requests.post( self.api_test_url, headers=headers, data=data) return response_tests.json() def", "date_test = html_content[3].text if country_code.text == 'GRC': today_tests = html_content[10].text", "CovidScraper: def __init__(self): self.api_url = 'http://127.0.0.1:5000/covidgr' self.api_sum_url = 'http://127.0.0.1:5000/summary/covidgr' self.api_test_url", "class_='news_li') get_daily_covidgr_text = 
daily_covidgr_html_content.text for elem in get_daily_covidgr_text.split(): regex =", "json.dumps( {\"date\": today, \"cases\": covid_data[0], \"deaths\": covid_data[1]}) sum_data = json.dumps(", "'GRC': today_tests = html_content[10].text total_tests = html_content[8].text return [date_test, today_tests]", "def __init__(self): self.api_url = 'http://127.0.0.1:5000/covidgr' self.api_sum_url = 'http://127.0.0.1:5000/summary/covidgr' self.api_test_url =", "html_content[1] date_test = html_content[3].text if country_code.text == 'GRC': today_tests =", "== 'GRC': today_tests = html_content[10].text total_tests = html_content[8].text return [date_test,", "if self.summary_data and self.covid_data: post_daily_and_sum_covid_data = self.call_api_put_data( self.today, self.covid_data, self.summary_data)", "elem) if match: covid_data.append(elem) self.covid_data = covid_data def get_summary_data(self, soup):", "self.call_api_put_data( self.today, self.covid_data, self.summary_data) data.append(post_daily_and_sum_covid_data) if soup_test_page: tests_data = self.get_tests_per_day(soup_test_page)", "def scrape_page_content_contains_tests(self): page = requests.get(self.scrape_tests_url) soup = BeautifulSoup(page.content, 'html.parser') return", "covid_data.append(elem) self.covid_data = covid_data def get_summary_data(self, soup): summary_data = []", "BeautifulSoup(page.content, 'html.parser') return soup def get_daily_data(self, soup): covid_data = []", "def call_api_put_data(self, today, covid_data, summary_data): headers = { 'Content-type': 'application/json',", "'application/json', } data = json.dumps({\"date\": today, \"daily_test\": tests}) response_tests =", "daily_covidgr_html_content.text for elem in get_daily_covidgr_text.split(): regex = '\\d*(.|)\\d+' match =", "= self.call_api_post_tested_covid_data( tests_data[0], tests_data[1]) data.append(post_daily_tests_covid_data) return data def scrape_page_content(self): page", "BeautifulSoup(page.content, 'html.parser') 
return soup def scrape_page_content_contains_tests(self): page = requests.get(self.scrape_tests_url) soup", "headers = { 'Content-type': 'application/json', } data = json.dumps( {\"date\":", "tests_data = self.get_tests_per_day(soup_test_page) if tests_data[0]: post_daily_tests_covid_data = self.call_api_post_tested_covid_data( tests_data[0], tests_data[1])", "json class CovidScraper: def __init__(self): self.api_url = 'http://127.0.0.1:5000/covidgr' self.api_sum_url =", "summary_data = [] all_cases_covidgr_html_content = soup.find_all( 'div', class_='maincounter-number') for item", "covid_data[1]}) sum_data = json.dumps( {\"sum_cases\": summary_data[0], \"sum_deaths\": summary_data[1], \"sum_recovered\": summary_data[2]})", "self.summary_data= [] def scrape_data(self): data = [] self.today = str(date.today())", "BeautifulSoup from datetime import date from lxml import html import", "self.today, self.covid_data, self.summary_data) data.append(post_daily_and_sum_covid_data) if soup_test_page: tests_data = self.get_tests_per_day(soup_test_page) if", "= re.sub( regex, '', all_cases_covidgr_html_content[item].text) summary_data.append(all_cases_data) self.summary_data = summary_data def", "if country_code.text == 'GRC': today_tests = html_content[10].text total_tests = html_content[8].text", "tests_data[0]: post_daily_tests_covid_data = self.call_api_post_tested_covid_data( tests_data[0], tests_data[1]) data.append(post_daily_tests_covid_data) return data def", "summary_data def get_tests_per_day(self, tree): html_content = tree.find('tr', id='LC34').find_all('td') country_code =", "html_content = tree.find('tr', id='LC34').find_all('td') country_code = html_content[1] date_test = html_content[3].text", "html_content[8].text return [date_test, today_tests] def call_api_post_tested_covid_data(self, today, tests): headers =", "return [response.json(), response_sum.json()] if __name__ == '__main__': cs = CovidScraper()", "def scrape_page_content(self): page = 
requests.get(self.scrape_url) soup = BeautifulSoup(page.content, 'html.parser') return", "data.append(post_daily_tests_covid_data) return data def scrape_page_content(self): page = requests.get(self.scrape_url) soup =", "def call_api_post_tested_covid_data(self, today, tests): headers = { 'Content-type': 'application/json', }", "= [] self.today = str(date.today()) soup = self.scrape_page_content() soup_test_page =", "call_api_post_tested_covid_data(self, today, tests): headers = { 'Content-type': 'application/json', } data", "today, \"cases\": covid_data[0], \"deaths\": covid_data[1]}) sum_data = json.dumps( {\"sum_cases\": summary_data[0],", "self.get_summary_data(soup) if self.summary_data and self.covid_data: post_daily_and_sum_covid_data = self.call_api_put_data( self.today, self.covid_data,", "data = json.dumps( {\"date\": today, \"cases\": covid_data[0], \"deaths\": covid_data[1]}) sum_data", "summary_data[2]}) response = requests.post(self.api_url, headers=headers, data=data) response_sum = requests.put( self.api_sum_url,", "if match: covid_data.append(elem) self.covid_data = covid_data def get_summary_data(self, soup): summary_data", "soup): covid_data = [] daily_covidgr_html_content = soup.find('li', class_='news_li') get_daily_covidgr_text =", "get_tests_per_day(self, tree): html_content = tree.find('tr', id='LC34').find_all('td') country_code = html_content[1] date_test", "id='LC34').find_all('td') country_code = html_content[1] date_test = html_content[3].text if country_code.text ==", "data.append(post_daily_and_sum_covid_data) if soup_test_page: tests_data = self.get_tests_per_day(soup_test_page) if tests_data[0]: post_daily_tests_covid_data =", "'http://127.0.0.1:5000/summary/covidgr' self.api_test_url = 'http://127.0.0.1:5000/covidgr/tests' self.scrape_url = 'https://www.worldometers.info/coronavirus/country/greece/' self.scrape_tests_url = 
'https://github.com/owid/covid-19-data/blob/master/public/data/testing/covid-testing-latest-data-source-details.csv'", "= json.dumps( {\"date\": today, \"cases\": covid_data[0], \"deaths\": covid_data[1]}) sum_data =", "summary_data[1], \"sum_recovered\": summary_data[2]}) response = requests.post(self.api_url, headers=headers, data=data) response_sum =", "requests.get(self.scrape_url) soup = BeautifulSoup(page.content, 'html.parser') return soup def scrape_page_content_contains_tests(self): page", "import date from lxml import html import requests import re", "= requests.post(self.api_url, headers=headers, data=data) response_sum = requests.put( self.api_sum_url, headers=headers, data=sum_data)", "today_tests] def call_api_post_tested_covid_data(self, today, tests): headers = { 'Content-type': 'application/json',", "= BeautifulSoup(page.content, 'html.parser') return soup def scrape_page_content_contains_tests(self): page = requests.get(self.scrape_tests_url)", "= requests.put( self.api_sum_url, headers=headers, data=sum_data) return [response.json(), response_sum.json()] if __name__", "} data = json.dumps({\"date\": today, \"daily_test\": tests}) response_tests = requests.post(", "re.findall(regex, elem) if match: covid_data.append(elem) self.covid_data = covid_data def get_summary_data(self,", "in range(len(all_cases_covidgr_html_content)): regex = r'(\\n)|\\s' all_cases_data = re.sub( regex, '',", "'html.parser') return soup def scrape_page_content_contains_tests(self): page = requests.get(self.scrape_tests_url) soup =", "soup.find_all( 'div', class_='maincounter-number') for item in range(len(all_cases_covidgr_html_content)): regex = r'(\\n)|\\s'", "'Content-type': 'application/json', } data = json.dumps( {\"date\": today, \"cases\": covid_data[0],", "scrape_page_content(self): page = requests.get(self.scrape_url) soup = BeautifulSoup(page.content, 'html.parser') return soup", "headers=headers, data=sum_data) return [response.json(), response_sum.json()] if 
__name__ == '__main__': cs", "covid_data = [] daily_covidgr_html_content = soup.find('li', class_='news_li') get_daily_covidgr_text = daily_covidgr_html_content.text", "sum_data = json.dumps( {\"sum_cases\": summary_data[0], \"sum_deaths\": summary_data[1], \"sum_recovered\": summary_data[2]}) response", "self.api_url = 'http://127.0.0.1:5000/covidgr' self.api_sum_url = 'http://127.0.0.1:5000/summary/covidgr' self.api_test_url = 'http://127.0.0.1:5000/covidgr/tests' self.scrape_url", "self.covid_data = covid_data def get_summary_data(self, soup): summary_data = [] all_cases_covidgr_html_content", "and self.covid_data: post_daily_and_sum_covid_data = self.call_api_put_data( self.today, self.covid_data, self.summary_data) data.append(post_daily_and_sum_covid_data) if", "requests.get(self.scrape_tests_url) soup = BeautifulSoup(page.content, 'html.parser') return soup def get_daily_data(self, soup):", "elem in get_daily_covidgr_text.split(): regex = '\\d*(.|)\\d+' match = re.findall(regex, elem)", "= covid_data def get_summary_data(self, soup): summary_data = [] all_cases_covidgr_html_content =", "def get_daily_data(self, soup): covid_data = [] daily_covidgr_html_content = soup.find('li', class_='news_li')", "= str(date.today()) soup = self.scrape_page_content() soup_test_page = self.scrape_page_content_contains_tests() if soup:", "get_summary_data(self, soup): summary_data = [] all_cases_covidgr_html_content = soup.find_all( 'div', class_='maincounter-number')", "response_sum.json()] if __name__ == '__main__': cs = CovidScraper() results =", "'https://github.com/owid/covid-19-data/blob/master/public/data/testing/covid-testing-latest-data-source-details.csv' self.today = '' self.covid_data = [] self.summary_data= [] def", "match: covid_data.append(elem) self.covid_data = covid_data def get_summary_data(self, soup): summary_data =", "regex, '', all_cases_covidgr_html_content[item].text) summary_data.append(all_cases_data) self.summary_data = summary_data def 
get_tests_per_day(self, tree):", "data=data) response_sum = requests.put( self.api_sum_url, headers=headers, data=sum_data) return [response.json(), response_sum.json()]", "json.dumps( {\"sum_cases\": summary_data[0], \"sum_deaths\": summary_data[1], \"sum_recovered\": summary_data[2]}) response = requests.post(self.api_url,", "= 'http://127.0.0.1:5000/covidgr' self.api_sum_url = 'http://127.0.0.1:5000/summary/covidgr' self.api_test_url = 'http://127.0.0.1:5000/covidgr/tests' self.scrape_url =", "def scrape_data(self): data = [] self.today = str(date.today()) soup =", "call_api_put_data(self, today, covid_data, summary_data): headers = { 'Content-type': 'application/json', }", "import requests import re import json class CovidScraper: def __init__(self):", "covid_data[0], \"deaths\": covid_data[1]}) sum_data = json.dumps( {\"sum_cases\": summary_data[0], \"sum_deaths\": summary_data[1],", "{ 'Content-type': 'application/json', } data = json.dumps( {\"date\": today, \"cases\":", "requests.post( self.api_test_url, headers=headers, data=data) return response_tests.json() def call_api_put_data(self, today, covid_data,", "summary_data.append(all_cases_data) self.summary_data = summary_data def get_tests_per_day(self, tree): html_content = tree.find('tr',", "data = [] self.today = str(date.today()) soup = self.scrape_page_content() soup_test_page", "country_code = html_content[1] date_test = html_content[3].text if country_code.text == 'GRC':", "soup def scrape_page_content_contains_tests(self): page = requests.get(self.scrape_tests_url) soup = BeautifulSoup(page.content, 'html.parser')", "get_daily_covidgr_text.split(): regex = '\\d*(.|)\\d+' match = re.findall(regex, elem) if match:", "datetime import date from lxml import html import requests import", "re import json class CovidScraper: def __init__(self): self.api_url = 'http://127.0.0.1:5000/covidgr'", "= json.dumps({\"date\": today, \"daily_test\": tests}) response_tests = requests.post( self.api_test_url, 
headers=headers,", "= json.dumps( {\"sum_cases\": summary_data[0], \"sum_deaths\": summary_data[1], \"sum_recovered\": summary_data[2]}) response =", "= soup.find_all( 'div', class_='maincounter-number') for item in range(len(all_cases_covidgr_html_content)): regex =", "summary_data): headers = { 'Content-type': 'application/json', } data = json.dumps(", "if __name__ == '__main__': cs = CovidScraper() results = cs.scrape_data()", "[response.json(), response_sum.json()] if __name__ == '__main__': cs = CovidScraper() results", "self.covid_data: post_daily_and_sum_covid_data = self.call_api_put_data( self.today, self.covid_data, self.summary_data) data.append(post_daily_and_sum_covid_data) if soup_test_page:", "'application/json', } data = json.dumps( {\"date\": today, \"cases\": covid_data[0], \"deaths\":", "\"sum_recovered\": summary_data[2]}) response = requests.post(self.api_url, headers=headers, data=data) response_sum = requests.put(", "'http://127.0.0.1:5000/covidgr/tests' self.scrape_url = 'https://www.worldometers.info/coronavirus/country/greece/' self.scrape_tests_url = 'https://github.com/owid/covid-19-data/blob/master/public/data/testing/covid-testing-latest-data-source-details.csv' self.today = ''", "if soup: self.get_daily_data(soup) self.get_summary_data(soup) if self.summary_data and self.covid_data: post_daily_and_sum_covid_data =", "soup_test_page = self.scrape_page_content_contains_tests() if soup: self.get_daily_data(soup) self.get_summary_data(soup) if self.summary_data and", "headers=headers, data=data) return response_tests.json() def call_api_put_data(self, today, covid_data, summary_data): headers", "'', all_cases_covidgr_html_content[item].text) summary_data.append(all_cases_data) self.summary_data = summary_data def get_tests_per_day(self, tree): html_content", "= '' self.covid_data = [] self.summary_data= [] def scrape_data(self): data", "range(len(all_cases_covidgr_html_content)): regex = r'(\\n)|\\s' all_cases_data = re.sub( regex, '', 
all_cases_covidgr_html_content[item].text)", "page = requests.get(self.scrape_url) soup = BeautifulSoup(page.content, 'html.parser') return soup def", "self.scrape_url = 'https://www.worldometers.info/coronavirus/country/greece/' self.scrape_tests_url = 'https://github.com/owid/covid-19-data/blob/master/public/data/testing/covid-testing-latest-data-source-details.csv' self.today = '' self.covid_data", "return soup def get_daily_data(self, soup): covid_data = [] daily_covidgr_html_content =", "= requests.get(self.scrape_tests_url) soup = BeautifulSoup(page.content, 'html.parser') return soup def get_daily_data(self,", "json.dumps({\"date\": today, \"daily_test\": tests}) response_tests = requests.post( self.api_test_url, headers=headers, data=data)", "= soup.find('li', class_='news_li') get_daily_covidgr_text = daily_covidgr_html_content.text for elem in get_daily_covidgr_text.split():", "= 'https://github.com/owid/covid-19-data/blob/master/public/data/testing/covid-testing-latest-data-source-details.csv' self.today = '' self.covid_data = [] self.summary_data= []", "self.get_tests_per_day(soup_test_page) if tests_data[0]: post_daily_tests_covid_data = self.call_api_post_tested_covid_data( tests_data[0], tests_data[1]) data.append(post_daily_tests_covid_data) return", "scrape_page_content_contains_tests(self): page = requests.get(self.scrape_tests_url) soup = BeautifulSoup(page.content, 'html.parser') return soup", "page = requests.get(self.scrape_tests_url) soup = BeautifulSoup(page.content, 'html.parser') return soup def", "= [] daily_covidgr_html_content = soup.find('li', class_='news_li') get_daily_covidgr_text = daily_covidgr_html_content.text for", "re.sub( regex, '', all_cases_covidgr_html_content[item].text) summary_data.append(all_cases_data) self.summary_data = summary_data def get_tests_per_day(self,", "soup = BeautifulSoup(page.content, 'html.parser') return soup def scrape_page_content_contains_tests(self): page =", "html_content[3].text if 
country_code.text == 'GRC': today_tests = html_content[10].text total_tests =", "soup: self.get_daily_data(soup) self.get_summary_data(soup) if self.summary_data and self.covid_data: post_daily_and_sum_covid_data = self.call_api_put_data(", "= '\\d*(.|)\\d+' match = re.findall(regex, elem) if match: covid_data.append(elem) self.covid_data", "country_code.text == 'GRC': today_tests = html_content[10].text total_tests = html_content[8].text return", "= BeautifulSoup(page.content, 'html.parser') return soup def get_daily_data(self, soup): covid_data =", "def get_summary_data(self, soup): summary_data = [] all_cases_covidgr_html_content = soup.find_all( 'div',", "tree): html_content = tree.find('tr', id='LC34').find_all('td') country_code = html_content[1] date_test =", "[] daily_covidgr_html_content = soup.find('li', class_='news_li') get_daily_covidgr_text = daily_covidgr_html_content.text for elem", "from bs4 import BeautifulSoup from datetime import date from lxml", "for item in range(len(all_cases_covidgr_html_content)): regex = r'(\\n)|\\s' all_cases_data = re.sub(", "self.summary_data) data.append(post_daily_and_sum_covid_data) if soup_test_page: tests_data = self.get_tests_per_day(soup_test_page) if tests_data[0]: post_daily_tests_covid_data", "return [date_test, today_tests] def call_api_post_tested_covid_data(self, today, tests): headers = {", "data=data) return response_tests.json() def call_api_put_data(self, today, covid_data, summary_data): headers =", "= self.scrape_page_content_contains_tests() if soup: self.get_daily_data(soup) self.get_summary_data(soup) if self.summary_data and self.covid_data:", "self.api_test_url = 'http://127.0.0.1:5000/covidgr/tests' self.scrape_url = 'https://www.worldometers.info/coronavirus/country/greece/' self.scrape_tests_url = 'https://github.com/owid/covid-19-data/blob/master/public/data/testing/covid-testing-latest-data-source-details.csv' self.today", "all_cases_data = re.sub( regex, '', 
all_cases_covidgr_html_content[item].text) summary_data.append(all_cases_data) self.summary_data = summary_data", "data = json.dumps({\"date\": today, \"daily_test\": tests}) response_tests = requests.post( self.api_test_url,", "today, \"daily_test\": tests}) response_tests = requests.post( self.api_test_url, headers=headers, data=data) return", "covid_data, summary_data): headers = { 'Content-type': 'application/json', } data =", "{\"sum_cases\": summary_data[0], \"sum_deaths\": summary_data[1], \"sum_recovered\": summary_data[2]}) response = requests.post(self.api_url, headers=headers,", "'http://127.0.0.1:5000/covidgr' self.api_sum_url = 'http://127.0.0.1:5000/summary/covidgr' self.api_test_url = 'http://127.0.0.1:5000/covidgr/tests' self.scrape_url = 'https://www.worldometers.info/coronavirus/country/greece/'", "self.today = '' self.covid_data = [] self.summary_data= [] def scrape_data(self):", "soup_test_page: tests_data = self.get_tests_per_day(soup_test_page) if tests_data[0]: post_daily_tests_covid_data = self.call_api_post_tested_covid_data( tests_data[0],", "'' self.covid_data = [] self.summary_data= [] def scrape_data(self): data =", "'\\d*(.|)\\d+' match = re.findall(regex, elem) if match: covid_data.append(elem) self.covid_data =", "response_tests.json() def call_api_put_data(self, today, covid_data, summary_data): headers = { 'Content-type':", "for elem in get_daily_covidgr_text.split(): regex = '\\d*(.|)\\d+' match = re.findall(regex,", "= [] all_cases_covidgr_html_content = soup.find_all( 'div', class_='maincounter-number') for item in", "self.api_sum_url, headers=headers, data=sum_data) return [response.json(), response_sum.json()] if __name__ == '__main__':", "bs4 import BeautifulSoup from datetime import date from lxml import", "regex = r'(\\n)|\\s' all_cases_data = re.sub( regex, '', all_cases_covidgr_html_content[item].text) summary_data.append(all_cases_data)", "[] self.today = str(date.today()) soup = self.scrape_page_content() soup_test_page 
= self.scrape_page_content_contains_tests()", "= self.get_tests_per_day(soup_test_page) if tests_data[0]: post_daily_tests_covid_data = self.call_api_post_tested_covid_data( tests_data[0], tests_data[1]) data.append(post_daily_tests_covid_data)", "'html.parser') return soup def get_daily_data(self, soup): covid_data = [] daily_covidgr_html_content", "= requests.get(self.scrape_url) soup = BeautifulSoup(page.content, 'html.parser') return soup def scrape_page_content_contains_tests(self):", "self.summary_data and self.covid_data: post_daily_and_sum_covid_data = self.call_api_put_data( self.today, self.covid_data, self.summary_data) data.append(post_daily_and_sum_covid_data)", "response_sum = requests.put( self.api_sum_url, headers=headers, data=sum_data) return [response.json(), response_sum.json()] if", "self.covid_data, self.summary_data) data.append(post_daily_and_sum_covid_data) if soup_test_page: tests_data = self.get_tests_per_day(soup_test_page) if tests_data[0]:", "= html_content[10].text total_tests = html_content[8].text return [date_test, today_tests] def call_api_post_tested_covid_data(self,", "item in range(len(all_cases_covidgr_html_content)): regex = r'(\\n)|\\s' all_cases_data = re.sub( regex,", "requests.put( self.api_sum_url, headers=headers, data=sum_data) return [response.json(), response_sum.json()] if __name__ ==", "class_='maincounter-number') for item in range(len(all_cases_covidgr_html_content)): regex = r'(\\n)|\\s' all_cases_data =", "import re import json class CovidScraper: def __init__(self): self.api_url =", "all_cases_covidgr_html_content[item].text) summary_data.append(all_cases_data) self.summary_data = summary_data def get_tests_per_day(self, tree): html_content =", "class CovidScraper: def __init__(self): self.api_url = 'http://127.0.0.1:5000/covidgr' self.api_sum_url = 'http://127.0.0.1:5000/summary/covidgr'", "in get_daily_covidgr_text.split(): regex = '\\d*(.|)\\d+' match = re.findall(regex, elem) if", "self.api_sum_url = 
'http://127.0.0.1:5000/summary/covidgr' self.api_test_url = 'http://127.0.0.1:5000/covidgr/tests' self.scrape_url = 'https://www.worldometers.info/coronavirus/country/greece/' self.scrape_tests_url", "headers=headers, data=data) response_sum = requests.put( self.api_sum_url, headers=headers, data=sum_data) return [response.json(),", "= daily_covidgr_html_content.text for elem in get_daily_covidgr_text.split(): regex = '\\d*(.|)\\d+' match", "match = re.findall(regex, elem) if match: covid_data.append(elem) self.covid_data = covid_data", "self.today = str(date.today()) soup = self.scrape_page_content() soup_test_page = self.scrape_page_content_contains_tests() if", "soup def get_daily_data(self, soup): covid_data = [] daily_covidgr_html_content = soup.find('li',", "= 'https://www.worldometers.info/coronavirus/country/greece/' self.scrape_tests_url = 'https://github.com/owid/covid-19-data/blob/master/public/data/testing/covid-testing-latest-data-source-details.csv' self.today = '' self.covid_data =", "soup): summary_data = [] all_cases_covidgr_html_content = soup.find_all( 'div', class_='maincounter-number') for", "\"sum_deaths\": summary_data[1], \"sum_recovered\": summary_data[2]}) response = requests.post(self.api_url, headers=headers, data=data) response_sum", "__name__ == '__main__': cs = CovidScraper() results = cs.scrape_data() print(results)", "post_daily_tests_covid_data = self.call_api_post_tested_covid_data( tests_data[0], tests_data[1]) data.append(post_daily_tests_covid_data) return data def scrape_page_content(self):", "soup.find('li', class_='news_li') get_daily_covidgr_text = daily_covidgr_html_content.text for elem in get_daily_covidgr_text.split(): regex", "= [] self.summary_data= [] def scrape_data(self): data = [] self.today", "tests_data[0], tests_data[1]) data.append(post_daily_tests_covid_data) return data def scrape_page_content(self): page = requests.get(self.scrape_url)", "= summary_data def get_tests_per_day(self, tree): html_content = 
tree.find('tr', id='LC34').find_all('td') country_code", "headers = { 'Content-type': 'application/json', } data = json.dumps({\"date\": today,", "from datetime import date from lxml import html import requests", "[] self.summary_data= [] def scrape_data(self): data = [] self.today =", "summary_data[0], \"sum_deaths\": summary_data[1], \"sum_recovered\": summary_data[2]}) response = requests.post(self.api_url, headers=headers, data=data)", "import html import requests import re import json class CovidScraper:", "self.scrape_page_content_contains_tests() if soup: self.get_daily_data(soup) self.get_summary_data(soup) if self.summary_data and self.covid_data: post_daily_and_sum_covid_data", "date from lxml import html import requests import re import", "tests_data[1]) data.append(post_daily_tests_covid_data) return data def scrape_page_content(self): page = requests.get(self.scrape_url) soup", "tree.find('tr', id='LC34').find_all('td') country_code = html_content[1] date_test = html_content[3].text if country_code.text", "covid_data def get_summary_data(self, soup): summary_data = [] all_cases_covidgr_html_content = soup.find_all(", "today, covid_data, summary_data): headers = { 'Content-type': 'application/json', } data", "r'(\\n)|\\s' all_cases_data = re.sub( regex, '', all_cases_covidgr_html_content[item].text) summary_data.append(all_cases_data) self.summary_data =", "regex = '\\d*(.|)\\d+' match = re.findall(regex, elem) if match: covid_data.append(elem)", "from lxml import html import requests import re import json", "self.call_api_post_tested_covid_data( tests_data[0], tests_data[1]) data.append(post_daily_tests_covid_data) return data def scrape_page_content(self): page =", "[] def scrape_data(self): data = [] self.today = str(date.today()) soup", "[] all_cases_covidgr_html_content = soup.find_all( 'div', class_='maincounter-number') for item in range(len(all_cases_covidgr_html_content)):", "today, tests): headers = { 'Content-type': 'application/json', } data 
=", "data=sum_data) return [response.json(), response_sum.json()] if __name__ == '__main__': cs =", "\"cases\": covid_data[0], \"deaths\": covid_data[1]}) sum_data = json.dumps( {\"sum_cases\": summary_data[0], \"sum_deaths\":", "= re.findall(regex, elem) if match: covid_data.append(elem) self.covid_data = covid_data def", "requests.post(self.api_url, headers=headers, data=data) response_sum = requests.put( self.api_sum_url, headers=headers, data=sum_data) return", "data def scrape_page_content(self): page = requests.get(self.scrape_url) soup = BeautifulSoup(page.content, 'html.parser')", "\"daily_test\": tests}) response_tests = requests.post( self.api_test_url, headers=headers, data=data) return response_tests.json()", "= html_content[1] date_test = html_content[3].text if country_code.text == 'GRC': today_tests", "total_tests = html_content[8].text return [date_test, today_tests] def call_api_post_tested_covid_data(self, today, tests):", "if soup_test_page: tests_data = self.get_tests_per_day(soup_test_page) if tests_data[0]: post_daily_tests_covid_data = self.call_api_post_tested_covid_data(", "html import requests import re import json class CovidScraper: def", "soup = self.scrape_page_content() soup_test_page = self.scrape_page_content_contains_tests() if soup: self.get_daily_data(soup) self.get_summary_data(soup)", "\"deaths\": covid_data[1]}) sum_data = json.dumps( {\"sum_cases\": summary_data[0], \"sum_deaths\": summary_data[1], \"sum_recovered\":", "soup = BeautifulSoup(page.content, 'html.parser') return soup def get_daily_data(self, soup): covid_data", "today_tests = html_content[10].text total_tests = html_content[8].text return [date_test, today_tests] def", "self.covid_data = [] self.summary_data= [] def scrape_data(self): data = []", "str(date.today()) soup = self.scrape_page_content() soup_test_page = self.scrape_page_content_contains_tests() if soup: self.get_daily_data(soup)", "= r'(\\n)|\\s' all_cases_data = re.sub( regex, '', 
all_cases_covidgr_html_content[item].text) summary_data.append(all_cases_data) self.summary_data", "html_content[10].text total_tests = html_content[8].text return [date_test, today_tests] def call_api_post_tested_covid_data(self, today," ]
[ "* float(wrel) ) splt = os.path.splitext(datei) newfilename = splt[0] +", "ext == \".jpg\" or ext == \".png\" or ext ==", "datei) def main(): files = os.listdir('.') files = filter(isimg, files)", "def isimg(isitimg): ext = os.path.splitext(isitimg)[1].lower() if ext == \".jpg\" or", "= os.path.splitext(isitimg)[1].lower() if ext == \".jpg\" or ext == \".png\"", "\".png\" or ext == \".gif\": return True return False def", "splt[0] + splt[1].lower() img = img.resize((DATEI_WEB_GROSSE, habs), Image.ANTIALIAS) img.save(newfilename, quality=100,", "= os.listdir('.') files = filter(isimg, files) for f in files:", "os.path.splitext(datei) newfilename = splt[0] + splt[1].lower() img = img.resize((DATEI_WEB_GROSSE, habs),", "img.resize((DATEI_WEB_GROSSE, habs), Image.ANTIALIAS) img.save(newfilename, quality=100, optimize=True, progressive=True) if newfilename !=", "+ splt[1].lower() img = img.resize((DATEI_WEB_GROSSE, habs), Image.ANTIALIAS) img.save(newfilename, quality=100, optimize=True,", "newfilename = splt[0] + splt[1].lower() img = img.resize((DATEI_WEB_GROSSE, habs), Image.ANTIALIAS)", "!= datei: os.rename(newfilename, datei) def main(): files = os.listdir('.') files", "splt = os.path.splitext(datei) newfilename = splt[0] + splt[1].lower() img =", "import Image DATEI_WEB_GROSSE = 700 def isimg(isitimg): ext = os.path.splitext(isitimg)[1].lower()", "int( float(img.size[1]) * float(wrel) ) splt = os.path.splitext(datei) newfilename =", "files = filter(isimg, files) for f in files: print f", "wrel = DATEI_WEB_GROSSE / float(img.size[0]) habs = int( float(img.size[1]) *", "= Image.open(datei) wrel = DATEI_WEB_GROSSE / float(img.size[0]) habs = int(", "return False def bearbeiten(datei): img = Image.open(datei) wrel = DATEI_WEB_GROSSE", "filter(isimg, files) for f in files: print f bearbeiten(f) if", "#!/usr/bin/python2.7 import os from PIL import Image DATEI_WEB_GROSSE = 700", "isimg(isitimg): ext = os.path.splitext(isitimg)[1].lower() if ext == \".jpg\" or ext", 
"DATEI_WEB_GROSSE = 700 def isimg(isitimg): ext = os.path.splitext(isitimg)[1].lower() if ext", ") splt = os.path.splitext(datei) newfilename = splt[0] + splt[1].lower() img", "700 def isimg(isitimg): ext = os.path.splitext(isitimg)[1].lower() if ext == \".jpg\"", "\".gif\": return True return False def bearbeiten(datei): img = Image.open(datei)", "= splt[0] + splt[1].lower() img = img.resize((DATEI_WEB_GROSSE, habs), Image.ANTIALIAS) img.save(newfilename,", "return True return False def bearbeiten(datei): img = Image.open(datei) wrel", "os.listdir('.') files = filter(isimg, files) for f in files: print", "os.path.splitext(isitimg)[1].lower() if ext == \".jpg\" or ext == \".png\" or", "or ext == \".gif\": return True return False def bearbeiten(datei):", "Image DATEI_WEB_GROSSE = 700 def isimg(isitimg): ext = os.path.splitext(isitimg)[1].lower() if", "= int( float(img.size[1]) * float(wrel) ) splt = os.path.splitext(datei) newfilename", "bearbeiten(datei): img = Image.open(datei) wrel = DATEI_WEB_GROSSE / float(img.size[0]) habs", "= img.resize((DATEI_WEB_GROSSE, habs), Image.ANTIALIAS) img.save(newfilename, quality=100, optimize=True, progressive=True) if newfilename", "quality=100, optimize=True, progressive=True) if newfilename != datei: os.rename(newfilename, datei) def", "True return False def bearbeiten(datei): img = Image.open(datei) wrel =", "f in files: print f bearbeiten(f) if __name__ == '__main__':", "= 700 def isimg(isitimg): ext = os.path.splitext(isitimg)[1].lower() if ext ==", "if ext == \".jpg\" or ext == \".png\" or ext", "DATEI_WEB_GROSSE / float(img.size[0]) habs = int( float(img.size[1]) * float(wrel) )", "img.save(newfilename, quality=100, optimize=True, progressive=True) if newfilename != datei: os.rename(newfilename, datei)", "for f in files: print f bearbeiten(f) if __name__ ==", "= os.path.splitext(datei) newfilename = splt[0] + splt[1].lower() img = img.resize((DATEI_WEB_GROSSE,", "files) for f in files: print f bearbeiten(f) if 
__name__", "os.rename(newfilename, datei) def main(): files = os.listdir('.') files = filter(isimg,", "== \".jpg\" or ext == \".png\" or ext == \".gif\":", "optimize=True, progressive=True) if newfilename != datei: os.rename(newfilename, datei) def main():", "PIL import Image DATEI_WEB_GROSSE = 700 def isimg(isitimg): ext =", "ext == \".png\" or ext == \".gif\": return True return", "splt[1].lower() img = img.resize((DATEI_WEB_GROSSE, habs), Image.ANTIALIAS) img.save(newfilename, quality=100, optimize=True, progressive=True)", "img = Image.open(datei) wrel = DATEI_WEB_GROSSE / float(img.size[0]) habs =", "def main(): files = os.listdir('.') files = filter(isimg, files) for", "img = img.resize((DATEI_WEB_GROSSE, habs), Image.ANTIALIAS) img.save(newfilename, quality=100, optimize=True, progressive=True) if", "habs), Image.ANTIALIAS) img.save(newfilename, quality=100, optimize=True, progressive=True) if newfilename != datei:", "habs = int( float(img.size[1]) * float(wrel) ) splt = os.path.splitext(datei)", "/ float(img.size[0]) habs = int( float(img.size[1]) * float(wrel) ) splt", "False def bearbeiten(datei): img = Image.open(datei) wrel = DATEI_WEB_GROSSE /", "== \".png\" or ext == \".gif\": return True return False", "float(img.size[0]) habs = int( float(img.size[1]) * float(wrel) ) splt =", "\".jpg\" or ext == \".png\" or ext == \".gif\": return", "float(wrel) ) splt = os.path.splitext(datei) newfilename = splt[0] + splt[1].lower()", "== \".gif\": return True return False def bearbeiten(datei): img =", "os from PIL import Image DATEI_WEB_GROSSE = 700 def isimg(isitimg):", "ext = os.path.splitext(isitimg)[1].lower() if ext == \".jpg\" or ext ==", "files = os.listdir('.') files = filter(isimg, files) for f in", "main(): files = os.listdir('.') files = filter(isimg, files) for f", "in files: print f bearbeiten(f) if __name__ == '__main__': main()", "import os from PIL import Image DATEI_WEB_GROSSE = 700 def", "or ext == \".png\" or ext == \".gif\": return True", "def 
bearbeiten(datei): img = Image.open(datei) wrel = DATEI_WEB_GROSSE / float(img.size[0])", "Image.open(datei) wrel = DATEI_WEB_GROSSE / float(img.size[0]) habs = int( float(img.size[1])", "from PIL import Image DATEI_WEB_GROSSE = 700 def isimg(isitimg): ext", "float(img.size[1]) * float(wrel) ) splt = os.path.splitext(datei) newfilename = splt[0]", "Image.ANTIALIAS) img.save(newfilename, quality=100, optimize=True, progressive=True) if newfilename != datei: os.rename(newfilename,", "ext == \".gif\": return True return False def bearbeiten(datei): img", "if newfilename != datei: os.rename(newfilename, datei) def main(): files =", "newfilename != datei: os.rename(newfilename, datei) def main(): files = os.listdir('.')", "datei: os.rename(newfilename, datei) def main(): files = os.listdir('.') files =", "progressive=True) if newfilename != datei: os.rename(newfilename, datei) def main(): files", "= DATEI_WEB_GROSSE / float(img.size[0]) habs = int( float(img.size[1]) * float(wrel)", "= filter(isimg, files) for f in files: print f bearbeiten(f)" ]
[ "async def flamewreath(self, ctx): \"\"\"I will not move when Flame", "import commands class WowCog: \"\"\"Custom Cog that had commands for", "def _play(self, url, ctx): \"\"\"Helper for aliasing Play in the", "discord.ext import commands class WowCog: \"\"\"Custom Cog that had commands", "\"\"\"Helper for aliasing Play in the Audio module\"\"\" audio =", "ctx): \"\"\"Helper for aliasing Play in the Audio module\"\"\" audio", "self.bot.say(\"Audio module required. Load with: {}load audio\".format(ctx.prefix)) return await ctx.invoke(audio.play,", "return await ctx.invoke(audio.play, url_or_search_terms=url) @commands.command(pass_context=True, no_pm=True) async def flamewreath(self, ctx):", "url_or_search_terms=url) @commands.command(pass_context=True, no_pm=True) async def flamewreath(self, ctx): \"\"\"I will not", "required. Load with: {}load audio\".format(ctx.prefix)) return await ctx.invoke(audio.play, url_or_search_terms=url) @commands.command(pass_context=True,", "def flamewreath(self, ctx): \"\"\"I will not move when Flame Wreath", "Play in the Audio module\"\"\" audio = self.bot.get_cog('Audio') if not", "\"\"\"I will not move when Flame Wreath is cast!\"\"\" await", "had commands for WoW Memes\"\"\" def __init__(self, bot): self.bot =", "audio\".format(ctx.prefix)) return await ctx.invoke(audio.play, url_or_search_terms=url) @commands.command(pass_context=True, no_pm=True) async def flamewreath(self,", "Load with: {}load audio\".format(ctx.prefix)) return await ctx.invoke(audio.play, url_or_search_terms=url) @commands.command(pass_context=True, no_pm=True)", "await ctx.invoke(audio.play, url_or_search_terms=url) @commands.command(pass_context=True, no_pm=True) async def flamewreath(self, ctx): \"\"\"I", "bot async def _play(self, url, ctx): \"\"\"Helper for aliasing Play", "for WoW Memes\"\"\" def __init__(self, bot): self.bot = bot async", "\"\"\"Custom Cog that had commands for WoW Memes\"\"\" def __init__(self,", "from discord.ext import commands 
class WowCog: \"\"\"Custom Cog that had", "_play(self, url, ctx): \"\"\"Helper for aliasing Play in the Audio", "self.bot.get_cog('Audio') if not audio: await self.bot.say(\"Audio module required. Load with:", "module required. Load with: {}load audio\".format(ctx.prefix)) return await ctx.invoke(audio.play, url_or_search_terms=url)", "when Flame Wreath is cast!\"\"\" await self._play(\"https://www.youtube.com/watch?v=gcA6y7sxKcA\", ctx) def setup(bot):", "no_pm=True) async def flamewreath(self, ctx): \"\"\"I will not move when", "Flame Wreath is cast!\"\"\" await self._play(\"https://www.youtube.com/watch?v=gcA6y7sxKcA\", ctx) def setup(bot): bot.add_cog(WowCog(bot))", "url, ctx): \"\"\"Helper for aliasing Play in the Audio module\"\"\"", "aliasing Play in the Audio module\"\"\" audio = self.bot.get_cog('Audio') if", "ctx): \"\"\"I will not move when Flame Wreath is cast!\"\"\"", "async def _play(self, url, ctx): \"\"\"Helper for aliasing Play in", "__init__(self, bot): self.bot = bot async def _play(self, url, ctx):", "await self.bot.say(\"Audio module required. 
Load with: {}load audio\".format(ctx.prefix)) return await", "@commands.command(pass_context=True, no_pm=True) async def flamewreath(self, ctx): \"\"\"I will not move", "Memes\"\"\" def __init__(self, bot): self.bot = bot async def _play(self,", "flamewreath(self, ctx): \"\"\"I will not move when Flame Wreath is", "commands for WoW Memes\"\"\" def __init__(self, bot): self.bot = bot", "WoW Memes\"\"\" def __init__(self, bot): self.bot = bot async def", "ctx.invoke(audio.play, url_or_search_terms=url) @commands.command(pass_context=True, no_pm=True) async def flamewreath(self, ctx): \"\"\"I will", "discord from discord.ext import commands class WowCog: \"\"\"Custom Cog that", "import discord from discord.ext import commands class WowCog: \"\"\"Custom Cog", "that had commands for WoW Memes\"\"\" def __init__(self, bot): self.bot", "WowCog: \"\"\"Custom Cog that had commands for WoW Memes\"\"\" def", "move when Flame Wreath is cast!\"\"\" await self._play(\"https://www.youtube.com/watch?v=gcA6y7sxKcA\", ctx) def", "bot): self.bot = bot async def _play(self, url, ctx): \"\"\"Helper", "audio = self.bot.get_cog('Audio') if not audio: await self.bot.say(\"Audio module required.", "{}load audio\".format(ctx.prefix)) return await ctx.invoke(audio.play, url_or_search_terms=url) @commands.command(pass_context=True, no_pm=True) async def", "will not move when Flame Wreath is cast!\"\"\" await self._play(\"https://www.youtube.com/watch?v=gcA6y7sxKcA\",", "in the Audio module\"\"\" audio = self.bot.get_cog('Audio') if not audio:", "class WowCog: \"\"\"Custom Cog that had commands for WoW Memes\"\"\"", "audio: await self.bot.say(\"Audio module required. 
Load with: {}load audio\".format(ctx.prefix)) return", "commands class WowCog: \"\"\"Custom Cog that had commands for WoW", "Cog that had commands for WoW Memes\"\"\" def __init__(self, bot):", "module\"\"\" audio = self.bot.get_cog('Audio') if not audio: await self.bot.say(\"Audio module", "not move when Flame Wreath is cast!\"\"\" await self._play(\"https://www.youtube.com/watch?v=gcA6y7sxKcA\", ctx)", "the Audio module\"\"\" audio = self.bot.get_cog('Audio') if not audio: await", "def __init__(self, bot): self.bot = bot async def _play(self, url,", "Audio module\"\"\" audio = self.bot.get_cog('Audio') if not audio: await self.bot.say(\"Audio", "not audio: await self.bot.say(\"Audio module required. Load with: {}load audio\".format(ctx.prefix))", "self.bot = bot async def _play(self, url, ctx): \"\"\"Helper for", "with: {}load audio\".format(ctx.prefix)) return await ctx.invoke(audio.play, url_or_search_terms=url) @commands.command(pass_context=True, no_pm=True) async", "= self.bot.get_cog('Audio') if not audio: await self.bot.say(\"Audio module required. Load", "for aliasing Play in the Audio module\"\"\" audio = self.bot.get_cog('Audio')", "= bot async def _play(self, url, ctx): \"\"\"Helper for aliasing", "if not audio: await self.bot.say(\"Audio module required. Load with: {}load" ]